Compare commits
776 commits
1.0.x-stab
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
78c1a37f2e | ||
|
|
705f4d6f47 | ||
|
|
182a3a6f0b | ||
|
|
5852a5fe14 | ||
|
|
84170f7c51 | ||
|
|
9f86984925 | ||
|
|
92ea59ff3c | ||
|
|
8deba612bb | ||
|
|
74a13a3489 | ||
|
|
14fd0d9189 | ||
|
|
dfe29820e1 | ||
|
|
999f6ddd9b | ||
|
|
a70fd7936a | ||
|
|
62ef1d0727 | ||
|
|
54270ad887 | ||
|
|
008d1ff1b0 | ||
|
|
088b744991 | ||
|
|
94cf59a53e | ||
|
|
69bb24b3d9 | ||
|
|
32407c92f1 | ||
|
|
d8ce4ae787 | ||
|
|
dac9f83a55 | ||
|
|
9fb211e859 | ||
|
|
b5fd0a2738 | ||
|
|
30cd472e23 | ||
|
|
ed613dee94 | ||
|
|
801f08b936 | ||
|
|
4d8747f9ed | ||
|
|
ab878ab69c | ||
|
|
f30a3926e0 | ||
|
|
e7c98d66e4 | ||
|
|
82ec258f6d | ||
|
|
37c2d8433b | ||
|
|
0494128e15 | ||
|
|
9f3d192dff | ||
|
|
1fdd62d3df | ||
|
|
c2cbdcaa38 | ||
|
|
37aabbe860 | ||
|
|
010a977aac | ||
|
|
b6bd6e55e2 | ||
|
|
ce6dcd27cb | ||
|
|
a67f6de06b | ||
|
|
d1b77ee3fd | ||
|
|
affeb65d00 | ||
|
|
37816b77a1 | ||
|
|
ba29a45cc6 | ||
|
|
efc440a8f9 | ||
|
|
805383f1ec | ||
|
|
a3f6982a61 | ||
|
|
3a94b2a805 | ||
|
|
1e313f990c | ||
|
|
d6f5ac743d | ||
|
|
85c7b84d13 | ||
|
|
900592e302 | ||
|
|
8102c42888 | ||
|
|
9d34fc0231 | ||
|
|
aa08f4b58c | ||
|
|
4b591fc910 | ||
|
|
79bd0aabca | ||
|
|
6bf528ed5b | ||
|
|
318e440d0d | ||
|
|
3757c6d915 | ||
|
|
a153eb8116 | ||
|
|
8879e552ef | ||
|
|
386f06da8c | ||
|
|
ba00ba27a5 | ||
|
|
ad2ee531a3 | ||
|
|
60c922c0a5 | ||
|
|
b2068fd8bb | ||
|
|
595efb926f | ||
|
|
f6e5ff75ce | ||
|
|
216674cd3d | ||
|
|
f093322a09 | ||
|
|
26efe88510 | ||
|
|
da520112e7 | ||
|
|
7466db5c4d | ||
|
|
89e8538d91 | ||
|
|
a9e1f5d7d0 | ||
|
|
dc61982ced | ||
|
|
002b91d5e6 | ||
|
|
3196b66442 | ||
|
|
1a256a6033 | ||
|
|
20b4d5d93b | ||
|
|
c35545a97a | ||
|
|
15831490d9 | ||
|
|
79387a694a | ||
|
|
993bb038d9 | ||
|
|
5d31f9fc1a | ||
|
|
be43faf059 | ||
|
|
bcaf0837b4 | ||
|
|
319872d5bc | ||
|
|
6e333bb563 | ||
|
|
1ca1469126 | ||
|
|
f3d8a7bb71 | ||
|
|
99a113e220 | ||
|
|
c1acd78060 | ||
|
|
62dc2a9cd3 | ||
|
|
5b5425c402 | ||
|
|
34671dcd5d | ||
|
|
1170f342dd | ||
|
|
27ed7bdfb7 | ||
|
|
517bb98129 | ||
|
|
b1cfb5490a | ||
|
|
f499079bf7 | ||
|
|
0d7dc9357a | ||
|
|
82c76dd66d | ||
|
|
2021c6d07f | ||
|
|
3d4a38a10a | ||
|
|
8fd4946959 | ||
|
|
08ce1e41b3 | ||
|
|
a2bc59dcfa | ||
|
|
d0fc0ed528 | ||
|
|
57464ac592 | ||
|
|
0f2a44179d | ||
|
|
7721c83ccf | ||
|
|
c374e7593b | ||
|
|
3e5f16d146 | ||
|
|
be95971e58 | ||
|
|
2f19f62e3f | ||
|
|
668e46d57f | ||
|
|
df09c4085d | ||
|
|
5033018119 | ||
|
|
ccffffc912 | ||
|
|
9be707ea06 | ||
|
|
2b4c0f8168 | ||
|
|
f64d085b0f | ||
|
|
5916642b2b | ||
|
|
07b63150d1 | ||
|
|
e842068f3e | ||
|
|
9d51f32fa1 | ||
|
|
765b9d1acf | ||
|
|
c54f70350e | ||
|
|
c7a87ac776 | ||
|
|
bdecd98b40 | ||
|
|
189d347f50 | ||
|
|
114630f769 | ||
|
|
6c25763748 | ||
|
|
fdb354b2db | ||
|
|
45cb68115b | ||
|
|
dd890231ee | ||
|
|
1764b898bd | ||
|
|
b0df70f279 | ||
|
|
85560e17f8 | ||
|
|
57fb96a1b7 | ||
|
|
47d20761c5 | ||
|
|
50a8963ba2 | ||
|
|
2856631638 | ||
|
|
1299e5e5ff | ||
|
|
cf3d8f2ad3 | ||
|
|
15edf63ffd | ||
|
|
c26ae0835d | ||
|
|
fe73144075 | ||
|
|
2dc5e92b0c | ||
|
|
286493463b | ||
|
|
aec51c03f4 | ||
|
|
7195f77945 | ||
|
|
b59ccb4087 | ||
|
|
9178df179d | ||
|
|
8cefa7fc9e | ||
|
|
45599a4f51 | ||
|
|
4e9f041ce9 | ||
|
|
f24ee77730 | ||
|
|
9928652b79 | ||
|
|
56d16a4cee | ||
|
|
a500638891 | ||
|
|
8bce5ae8ed | ||
|
|
2dc924faf4 | ||
|
|
1f395c4e41 | ||
|
|
bfac6bbe3a | ||
|
|
def4ce3e6b | ||
|
|
504a358162 | ||
|
|
5898cfb3bc | ||
|
|
cf86245526 | ||
|
|
8316e7798e | ||
|
|
5f53732719 | ||
|
|
ee0c3dbede | ||
|
|
ebc01b3f54 | ||
|
|
f1fff71577 | ||
|
|
e0c632f0d3 | ||
|
|
f3cff7a52a | ||
|
|
fc4a34b87f | ||
|
|
76cd30d79c | ||
|
|
6c1489e1ef | ||
|
|
d9b103c350 | ||
|
|
43994ff19a | ||
|
|
95d0145a0b | ||
|
|
7e111a332e | ||
|
|
800d02839c | ||
|
|
19613b6e01 | ||
|
|
ec5590fd56 | ||
|
|
c1597b1c9a | ||
|
|
478efa092d | ||
|
|
ac90956f92 | ||
|
|
1da1dab013 | ||
|
|
80cadd54c4 | ||
|
|
b4ea572293 | ||
|
|
ae2ac84921 | ||
|
|
0891a9ded7 | ||
|
|
51baa3e154 | ||
|
|
1717bdac9e | ||
|
|
7f74d16a36 | ||
|
|
2cd8c0c6a0 | ||
|
|
ceed6f85c5 | ||
|
|
4ff7d05aa4 | ||
|
|
b762c1875f | ||
|
|
15f50408a9 | ||
|
|
f0946acd75 | ||
|
|
136dea00b2 | ||
|
|
3a4de68dd4 | ||
|
|
9c0f385ab2 | ||
|
|
d337cd1546 | ||
|
|
94029c7f6c | ||
|
|
aeab2851c1 | ||
|
|
126c9f8503 | ||
|
|
6e41cb94d8 | ||
|
|
3edc8049db | ||
|
|
e76dc21182 | ||
|
|
d80a2766ee | ||
|
|
2eb6f80ae8 | ||
|
|
eb168b6830 | ||
|
|
4701a7b5ab | ||
|
|
a2b1ddb054 | ||
|
|
4e0a0036e6 | ||
|
|
5651c2ee97 | ||
|
|
3660992148 | ||
|
|
602b6e3ce9 | ||
|
|
3e71b6ffc7 | ||
|
|
4efdaa28c9 | ||
|
|
8b2fd956e4 | ||
|
|
6ca5b9d4ba | ||
|
|
a8d42a62f5 | ||
|
|
a107f1f8b7 | ||
|
|
3c63209ab1 | ||
|
|
05dd57731a | ||
|
|
26be7bb1b9 | ||
|
|
2be051289f | ||
|
|
096ba46bcd | ||
|
|
07a9c5ee16 | ||
|
|
49cb195e5a | ||
|
|
7c31e98cf3 | ||
|
|
0b72761b44 | ||
|
|
8511d3714d | ||
|
|
d9e6be671a | ||
|
|
2128f2cd6c | ||
|
|
bb626c5320 | ||
|
|
25c1c56fc9 | ||
|
|
e7755b3fc5 | ||
|
|
cd4326ca69 | ||
|
|
9cb54167c4 | ||
|
|
732bdb40e0 | ||
|
|
2d58b02b13 | ||
|
|
7bf5956565 | ||
|
|
d0d922e78c | ||
|
|
6b6876186b | ||
|
|
1a30ef4247 | ||
|
|
916d83b705 | ||
|
|
6fa11f9b76 | ||
|
|
1215f0335a | ||
|
|
80773ad0af | ||
|
|
3389e84bb2 | ||
|
|
e6dfd9dd5e | ||
|
|
d63982491b | ||
|
|
fb00629345 | ||
|
|
cd7e7db809 | ||
|
|
e3189c33c7 | ||
|
|
0495d74a34 | ||
|
|
a1f3566632 | ||
|
|
78ea72a33b | ||
|
|
5c39babf69 | ||
|
|
361d293a93 | ||
|
|
049fa620c8 | ||
|
|
194f0028a4 | ||
|
|
1c62587165 | ||
|
|
696a5ad8e3 | ||
|
|
b0accb8447 | ||
|
|
2c05d4c9d5 | ||
|
|
d8e70a5ee8 | ||
|
|
30fececda4 | ||
|
|
1f0f97869f | ||
|
|
ea158554fc | ||
|
|
00dad8d0fb | ||
|
|
0312ecd2f6 | ||
|
|
ef539f48c8 | ||
|
|
1ac5086b17 | ||
|
|
2c019e5e91 | ||
|
|
a8a9ab16b6 | ||
|
|
8addd14c6d | ||
|
|
bf5ded8939 | ||
|
|
c0fb8c517b | ||
|
|
2234f0fb22 | ||
|
|
af6f300720 | ||
|
|
bcea07dff4 | ||
|
|
b0d8996576 | ||
|
|
e73372bc1a | ||
|
|
cac5ee4695 | ||
|
|
f846cd0a51 | ||
|
|
59427e29bb | ||
|
|
e8b5ba5dd6 | ||
|
|
417763782e | ||
|
|
21174924c3 | ||
|
|
b1049021c0 | ||
|
|
37e94ebcd5 | ||
|
|
081a7fc3d6 | ||
|
|
96f7e67489 | ||
|
|
40bc1861a7 | ||
|
|
57dbfd0e56 | ||
|
|
cb6210d573 | ||
|
|
53ce5808c4 | ||
|
|
3f3a6420e5 | ||
|
|
6330cee24a | ||
|
|
96f05c7c01 | ||
|
|
a259bd3e00 | ||
|
|
2e1ad71d08 | ||
|
|
646f4e1bd1 | ||
|
|
6206308fa8 | ||
|
|
da2effbfc9 | ||
|
|
31ee101d5d | ||
|
|
ad25fba0f0 | ||
|
|
b86ecb5487 | ||
|
|
5215574377 | ||
|
|
25f6dcf88d | ||
|
|
3fda07999d | ||
|
|
6f93043eab | ||
|
|
d7965adb86 | ||
|
|
9a185596ac | ||
|
|
31ebc18c12 | ||
|
|
eaa66edc93 | ||
|
|
03e7bfa1c7 | ||
|
|
431e4495af | ||
|
|
b3c501a73a | ||
|
|
c29587f66d | ||
|
|
6acfb43314 | ||
|
|
f02a2b0afc | ||
|
|
d7902c9618 | ||
|
|
565ec43398 | ||
|
|
60a419953f | ||
|
|
c64f5a66d9 | ||
|
|
9752950d82 | ||
|
|
37e8df8300 | ||
|
|
f0722561c3 | ||
|
|
c75b02814d | ||
|
|
eddc8b20bd | ||
|
|
353df3b841 | ||
|
|
a3954f3847 | ||
|
|
58d426d6aa | ||
|
|
5fb665e5eb | ||
|
|
b1e874aa87 | ||
|
|
c6ef174535 | ||
|
|
56eebda3fd | ||
|
|
a7cceebe9c | ||
|
|
8b237db382 | ||
|
|
42ca0efa5c | ||
|
|
912b9e4d45 | ||
|
|
d3cb048d2b | ||
|
|
d114073413 | ||
|
|
ab4a9a381f | ||
|
|
d99b6c52aa | ||
|
|
d76127de49 | ||
|
|
e04c694e3c | ||
|
|
b0cd0bf671 | ||
|
|
9ad38a9e7a | ||
|
|
04e6c5bce6 | ||
|
|
1e9463e466 | ||
|
|
7766475e52 | ||
|
|
5b1d063301 | ||
|
|
1cae021b47 | ||
|
|
9c2aa45b75 | ||
|
|
d78c0d45c8 | ||
|
|
4b61275814 | ||
|
|
6a15f6959e | ||
|
|
58015b4f40 | ||
|
|
c0ef0ab7c8 | ||
|
|
a7ccfbecab | ||
|
|
ca40889348 | ||
|
|
63168d68f5 | ||
|
|
1991308567 | ||
|
|
bf0f20e0d9 | ||
|
|
a0e4c9ea93 | ||
|
|
2d77d1ba27 | ||
|
|
e240e906b0 | ||
|
|
8a4c497ea7 | ||
|
|
de377f3849 | ||
|
|
ed23f7d9fc | ||
|
|
31bcb51ee4 | ||
|
|
b86cabdc1a | ||
|
|
373c4db7a4 | ||
|
|
6d3b2be339 | ||
|
|
6fd331da15 | ||
|
|
7bb90ac86a | ||
|
|
2e64a24cc7 | ||
|
|
87637dd6fe | ||
|
|
f413016ef0 | ||
|
|
6009e6e719 | ||
|
|
209272fb6c | ||
|
|
9b5f8de1f3 | ||
|
|
eaf07fcf41 | ||
|
|
18ce852146 | ||
|
|
db6715b282 | ||
|
|
f5d87d0aaa | ||
|
|
555ac53d22 | ||
|
|
413d4484b8 | ||
|
|
4b417ec15a | ||
|
|
3728f38ad7 | ||
|
|
4c1a4f9505 | ||
|
|
b66abf5a2c | ||
|
|
e362d32b3a | ||
|
|
4dac6375ed | ||
|
|
5d5c5dbced | ||
|
|
b4472e7167 | ||
|
|
84e76c1e30 | ||
|
|
c334f206ed | ||
|
|
5a88eb89f7 | ||
|
|
01e3ad7977 | ||
|
|
6671cb123f | ||
|
|
f04638348c | ||
|
|
248e177a6c | ||
|
|
60bb6480b4 | ||
|
|
7bdc3994dd | ||
|
|
2d1ab796ba | ||
|
|
3c951a0828 | ||
|
|
0c1fec027d | ||
|
|
276f0f81d9 | ||
|
|
f1f46b6983 | ||
|
|
6758a1d102 | ||
|
|
4b6ddc5289 | ||
|
|
d3aedaec03 | ||
|
|
286a9dc10e | ||
|
|
bb058bf19a | ||
|
|
05e7d3727d | ||
|
|
e065db51d3 | ||
|
|
5022eeaf4c | ||
|
|
2bafefd3f3 | ||
|
|
b2e34e1f57 | ||
|
|
aa0347cd2e | ||
|
|
a6912c9db1 | ||
|
|
962e4c8313 | ||
|
|
371a209ae3 | ||
|
|
06be43a8c1 | ||
|
|
409b13a9fb | ||
|
|
04d92f35cd | ||
|
|
7d41ffc47c | ||
|
|
bd575126a3 | ||
|
|
c163283d22 | ||
|
|
47efed8260 | ||
|
|
047dfeaafa | ||
|
|
78eaeebae8 | ||
|
|
3ceecf4e0c | ||
|
|
6db4eab95c | ||
|
|
daace7283c | ||
|
|
e37872bbca | ||
|
|
c2a13a1645 | ||
|
|
cb47b7541e | ||
|
|
ebe63865bd | ||
|
|
131437d28f | ||
|
|
776f33a83e | ||
|
|
befb852659 | ||
|
|
c5a3c583f2 | ||
|
|
94d2480b9e | ||
|
|
0522b2cf71 | ||
|
|
b5748782ef | ||
|
|
dd1e475ea4 | ||
|
|
5e51a78d6a | ||
|
|
c76ea84412 | ||
|
|
d2f334ca85 | ||
|
|
7810732608 | ||
|
|
ff2e6cc220 | ||
|
|
8fb51475f0 | ||
|
|
3083a18191 | ||
|
|
c81026e308 | ||
|
|
568ba283c8 | ||
|
|
0755c0c4b1 | ||
|
|
0f773b8dfe | ||
|
|
938c440764 | ||
|
|
ea5e1a3b9c | ||
|
|
d380321e2e | ||
|
|
8837fb46b9 | ||
|
|
e540d453a0 | ||
|
|
12c33ac975 | ||
|
|
3d89d02e9d | ||
|
|
496435c350 | ||
|
|
b8d5b27054 | ||
|
|
5c82651bde | ||
|
|
cbf198c8bd | ||
|
|
5d67b374fc | ||
|
|
61a76bfd1c | ||
|
|
c23e019ccd | ||
|
|
30da676626 | ||
|
|
109aef0645 | ||
|
|
41375af05f | ||
|
|
19ab7eb8a7 | ||
|
|
118a6aa872 | ||
|
|
6ba204a602 | ||
|
|
02121379e7 | ||
|
|
daa8361c91 | ||
|
|
56bcf8c209 | ||
|
|
a2beed97e7 | ||
|
|
4201107a0a | ||
|
|
23300cf73c | ||
|
|
de4eb78d83 | ||
|
|
d4ea338f19 | ||
|
|
0cbde38d77 | ||
|
|
4e0548c151 | ||
|
|
1e9f915921 | ||
|
|
db79ec0079 | ||
|
|
18d924317d | ||
|
|
635436a438 | ||
|
|
2d43975ce3 | ||
|
|
6cd615ea97 | ||
|
|
790fc63f7c | ||
|
|
08887043db | ||
|
|
01ae26ff2b | ||
|
|
af262d3b82 | ||
|
|
490aaa3dad | ||
|
|
57348f0650 | ||
|
|
c110553be9 | ||
|
|
646e75ddf4 | ||
|
|
eb0a257d8f | ||
|
|
3e6d65e837 | ||
|
|
e4ec4fc351 | ||
|
|
bba5cc50c6 | ||
|
|
b476562edc | ||
|
|
47b14ee8a1 | ||
|
|
251f6fc765 | ||
|
|
0dca29285c | ||
|
|
19d0058c53 | ||
|
|
141380dd26 | ||
|
|
1da5f64b91 | ||
|
|
9760e88b66 | ||
|
|
a2ff58f4f4 | ||
|
|
b24517e648 | ||
|
|
65e6310758 | ||
|
|
a468f75a7c | ||
|
|
bb5a57b468 | ||
|
|
b72968dfb9 | ||
|
|
4371e64d86 | ||
|
|
0e6685ee28 | ||
|
|
19d2d97d78 | ||
|
|
3d120456ff | ||
|
|
7e7f3aba4c | ||
|
|
2dca992496 | ||
|
|
b1f4e09a7c | ||
|
|
bc1408806f | ||
|
|
5046d0abc4 | ||
|
|
7890c94496 | ||
|
|
58b5b1ddac | ||
|
|
7570e4ce6d | ||
|
|
b57c97a962 | ||
|
|
7b3869ec04 | ||
|
|
3a30373d94 | ||
|
|
ca6a128c91 | ||
|
|
8d53ca73e7 | ||
|
|
a3c5159223 | ||
|
|
41089c1da8 | ||
|
|
a31097fc86 | ||
|
|
3b846551a3 | ||
|
|
3f9cb9bb59 | ||
|
|
bed9b035b7 | ||
|
|
e9898cdbbe | ||
|
|
679fd2bf92 | ||
|
|
957650dfe3 | ||
|
|
3801c37f51 | ||
|
|
182a82315f | ||
|
|
adc5038259 | ||
|
|
ea493eee1d | ||
|
|
b58a812c18 | ||
|
|
78c9336ad7 | ||
|
|
f37db1fa84 | ||
|
|
a33128791b | ||
|
|
080c17f175 | ||
|
|
cabd7178aa | ||
|
|
0179bbc046 | ||
|
|
a04596cdc5 | ||
|
|
54f204b1a5 | ||
|
|
a6606deb05 | ||
|
|
8ea1681519 | ||
|
|
67111ce764 | ||
|
|
a53ee10e41 | ||
|
|
8c58ab0155 | ||
|
|
c0f1112d34 | ||
|
|
0a03ec1bf8 | ||
|
|
41c2aa7488 | ||
|
|
fd5e041099 | ||
|
|
6a9cb9f1a3 | ||
|
|
0d635721a1 | ||
|
|
7cc7b3a2d5 | ||
|
|
4137f4a6f9 | ||
|
|
3bc3ee6d6a | ||
|
|
af19e62479 | ||
|
|
4ac8f8330a | ||
|
|
48be8602e3 | ||
|
|
2b26d74d9b | ||
|
|
957623094b | ||
|
|
0a386750c3 | ||
|
|
a39c13a574 | ||
|
|
bdf0082372 | ||
|
|
c0a83e7241 | ||
|
|
0bee6bed9e | ||
|
|
f40161f035 | ||
|
|
6794759da1 | ||
|
|
0bd3ef12ab | ||
|
|
d5b0e2c8be | ||
|
|
bffb58cc1f | ||
|
|
c001ab3610 | ||
|
|
85d392b4c7 | ||
|
|
9c529cd6c7 | ||
|
|
b7660bec6d | ||
|
|
496325a417 | ||
|
|
51bb055936 | ||
|
|
9373550439 | ||
|
|
aff15fdf9a | ||
|
|
49a672946d | ||
|
|
879a0232b0 | ||
|
|
b7604e44ab | ||
|
|
27773f0f76 | ||
|
|
c7f9f09278 | ||
|
|
9e99933c53 | ||
|
|
6b9c5d8f3d | ||
|
|
006e89d74a | ||
|
|
848bbd07d3 | ||
|
|
2a34be05ad | ||
|
|
0c25f254b2 | ||
|
|
7e090f45d9 | ||
|
|
1755c977ba | ||
|
|
3a2afd0cec | ||
|
|
e8e9147e85 | ||
|
|
e20f338505 | ||
|
|
e9fa768fc6 | ||
|
|
2012fed7c4 | ||
|
|
3af05ef74c | ||
|
|
082a3ec7b7 | ||
|
|
66b94018a6 | ||
|
|
6e53b4cba6 | ||
|
|
6b0f0f8313 | ||
|
|
c933d66b2d | ||
|
|
5d97bf7e4c | ||
|
|
ec9c57f467 | ||
|
|
1f53145523 | ||
|
|
2c5deb7e59 | ||
|
|
43e40f5f14 | ||
|
|
62013fd070 | ||
|
|
d647542d86 | ||
|
|
885a805594 | ||
|
|
7ec9288ecd | ||
|
|
f9923c7134 | ||
|
|
e7c70b8676 | ||
|
|
496bc127d8 | ||
|
|
945c71ba56 | ||
|
|
beaea0bc02 | ||
|
|
a21fa90834 | ||
|
|
abb82e0b03 | ||
|
|
8fd78c3a19 | ||
|
|
80a4625d89 | ||
|
|
a121598651 | ||
|
|
175f17af97 | ||
|
|
c6b5f9751e | ||
|
|
8c17da69b5 | ||
|
|
4bbb26b8dc | ||
|
|
6194a1604c | ||
|
|
4182a2ab8e | ||
|
|
abc0f2dcbe | ||
|
|
4a41b34f06 | ||
|
|
308124ba97 | ||
|
|
7ed7e8ae0d | ||
|
|
f8a3fe1982 | ||
|
|
18f2089617 | ||
|
|
9c490ebbc1 | ||
|
|
8faf9aec82 | ||
|
|
fe0a885d79 | ||
|
|
d3114be395 | ||
|
|
20faaf2fe0 | ||
|
|
64bf2eff7f | ||
|
|
28c29a7bae | ||
|
|
d8ff844bfa | ||
|
|
e4238710f3 | ||
|
|
7a87332537 | ||
|
|
f4b4bb0849 | ||
|
|
5d39787825 | ||
|
|
1ae91971d6 | ||
|
|
09e8adb7bb | ||
|
|
c5e2c78ed0 | ||
|
|
47aabed2e4 | ||
|
|
cca473eca3 | ||
|
|
c8745ccdb8 | ||
|
|
3123ce3684 | ||
|
|
19e0d08269 | ||
|
|
466f09f894 | ||
|
|
cd67e7c6b3 | ||
|
|
adfce471a1 | ||
|
|
54c8c5f791 | ||
|
|
1f0414e06b | ||
|
|
9fe01a7f58 | ||
|
|
2818ac6c26 | ||
|
|
2fb74c6aff | ||
|
|
53fabbab73 | ||
|
|
1a351cff88 | ||
|
|
1d179d9185 | ||
|
|
76b3371420 | ||
|
|
113ee02048 | ||
|
|
c8e17e2c7a | ||
|
|
7095955555 | ||
|
|
07b76a307c | ||
|
|
c3b5759ce4 | ||
|
|
1fcac25c17 | ||
|
|
33b8f54460 | ||
|
|
23a55420df | ||
|
|
cf86bd4ab6 | ||
|
|
d0b5a412c9 | ||
|
|
53a0ef78fd | ||
|
|
a5ec7c5662 | ||
|
|
708ea6869d | ||
|
|
e3ee17dee7 | ||
|
|
b30289f233 | ||
|
|
2d3fce177f | ||
|
|
caf7a2ca3a | ||
|
|
30d3056f90 | ||
|
|
01e9abb19d | ||
|
|
430ca21d7c | ||
|
|
1a878b4c80 | ||
|
|
bc825e4343 | ||
|
|
0df0b706df | ||
|
|
0f0e8d505d | ||
|
|
ef2a92f980 | ||
|
|
8fdb7ee674 | ||
|
|
7eea7e229b | ||
|
|
89e9dc8ed9 | ||
|
|
d5f3de54bb | ||
|
|
56feb5637e | ||
|
|
d34daeb33e | ||
|
|
57486b1ed0 | ||
|
|
d8fe17d2f5 | ||
|
|
d58434e2ea | ||
|
|
79d6e1cd74 | ||
|
|
46ad8d1877 | ||
|
|
f374bc53ba | ||
|
|
65240d1f14 | ||
|
|
fd832a70eb | ||
|
|
656ba51c26 | ||
|
|
9452862f0c | ||
|
|
1b6cae729b | ||
|
|
8fac41f115 | ||
|
|
2a1e52fc1e | ||
|
|
8bd2469465 | ||
|
|
5b60c3690e | ||
|
|
4941ef63ee | ||
|
|
75e7452143 | ||
|
|
12f6c6cef2 | ||
|
|
c1b86756c8 | ||
|
|
4ed580a906 | ||
|
|
21d7481f42 | ||
|
|
a7b7925495 | ||
|
|
2fefd1a77e | ||
|
|
1064f29d15 | ||
|
|
080ef6b896 | ||
|
|
8f8b4387b6 | ||
|
|
f89991f43b | ||
|
|
2778710cc1 | ||
|
|
e4507f42eb | ||
|
|
db8f29ce5a | ||
|
|
9a29881583 | ||
|
|
3b0e0421c2 | ||
|
|
f194dc9cae | ||
|
|
1b69e0644b | ||
|
|
a21267cf41 | ||
|
|
5a631d25c4 | ||
|
|
8ca41347ad | ||
|
|
dde4a59495 | ||
|
|
ff77d3ccd7 | ||
|
|
2fcdfab8ba | ||
|
|
5e4d8d7c5a | ||
|
|
a5514c2cf6 | ||
|
|
6c7bad518c | ||
|
|
80771cb16f | ||
|
|
43349f65ae | ||
|
|
828e7981f0 | ||
|
|
a789b97a43 | ||
|
|
8cb32d761c |
70 changed files with 5402 additions and 3795 deletions
13
.gitignore
vendored
13
.gitignore
vendored
|
|
@ -1,9 +1,16 @@
|
||||||
pom.xml
|
pom.xml*
|
||||||
*jar
|
*jar
|
||||||
/lib/
|
/lib/
|
||||||
/classes/
|
/classes/
|
||||||
.lein-failures
|
.lein-*
|
||||||
.lein-deps-sum
|
|
||||||
TAGS
|
TAGS
|
||||||
checkouts/*
|
checkouts/*
|
||||||
doc/*
|
doc/*
|
||||||
|
deploy.docs.sh
|
||||||
|
target/*
|
||||||
|
todo.org
|
||||||
|
.nrepl-*
|
||||||
|
.idea/
|
||||||
|
*.iml
|
||||||
|
/.clj-kondo/.cache
|
||||||
|
/.lsp/.cache
|
||||||
|
|
|
||||||
21
.travis.yml
21
.travis.yml
|
|
@ -1,9 +1,20 @@
|
||||||
language: clojure
|
language: clojure
|
||||||
lein: lein2
|
sudo: required
|
||||||
|
lein: lein
|
||||||
|
dist: xenial
|
||||||
before_script:
|
before_script:
|
||||||
|
# Give MongoDB server some time to boot
|
||||||
|
- sleep 15
|
||||||
|
- mongod --version
|
||||||
- ./bin/ci/before_script.sh
|
- ./bin/ci/before_script.sh
|
||||||
script: lein2 javac && lein2 ci test
|
script: lein do clean, javac, test
|
||||||
jdk:
|
jdk:
|
||||||
- openjdk6
|
- openjdk10
|
||||||
- openjdk7
|
- oraclejdk11
|
||||||
- oraclejdk7
|
- openjdk12
|
||||||
|
services:
|
||||||
|
- mongodb
|
||||||
|
branches:
|
||||||
|
only:
|
||||||
|
- master
|
||||||
|
- 3.5.x-stable
|
||||||
|
|
|
||||||
13
CONTRIBUTING.md
Normal file
13
CONTRIBUTING.md
Normal file
|
|
@ -0,0 +1,13 @@
|
||||||
|
## Pre-requisites
|
||||||
|
|
||||||
|
The project uses [Leiningen 2](http://leiningen.org) and requires a recent MongoDB to be running
|
||||||
|
locally. Make sure you have those two installed and then run tests against all supported Clojure versions using
|
||||||
|
|
||||||
|
./bin/ci/before_script.sh
|
||||||
|
lein all do clean, javac, test
|
||||||
|
|
||||||
|
|
||||||
|
## Pull Requests
|
||||||
|
|
||||||
|
Then create a branch and make your changes on it. Once you are done with your changes and all
|
||||||
|
tests pass, write a [good, detailed commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html) and submit a pull request on GitHub.
|
||||||
698
ChangeLog.md
698
ChangeLog.md
|
|
@ -1,13 +1,694 @@
|
||||||
## Changes between 1.0.1 and 1.0.2
|
## Changes between 3.5.x and 3.6.0 (unreleased)
|
||||||
|
|
||||||
No changes yet.
|
### UUID Representation Option
|
||||||
|
|
||||||
|
Added a new connection option, `:uuid-representation`.
|
||||||
|
|
||||||
|
Contributed by @okorz001.
|
||||||
|
|
||||||
|
GitHub issue: [#212](https://github.com/michaelklishin/monger/issues/212)
|
||||||
|
|
||||||
|
### Operator List Update
|
||||||
|
|
||||||
|
For MongoDB 4.x.
|
||||||
|
|
||||||
|
Contributed by @mjrb.
|
||||||
|
|
||||||
|
GitHub issue: [#196](https://github.com/michaelklishin/monger/pull/196)
|
||||||
|
|
||||||
|
### Dependency Update
|
||||||
|
|
||||||
|
Contributed by @robhanlon22.
|
||||||
|
|
||||||
|
GitHub issue: [#206](https://github.com/michaelklishin/monger/pull/206)
|
||||||
|
|
||||||
|
|
||||||
## Changes between 1.0.0 and 1.0.1
|
## Changes between 3.1.x and 3.5.0 (Dec 10th, 2018)
|
||||||
|
|
||||||
|
### MongoDB Java Driver Update
|
||||||
|
|
||||||
|
MongoDB Java driver dependency has been updated to `3.9.x`.
|
||||||
|
|
||||||
|
This means that Monger now **requires JDK 8**.
|
||||||
|
|
||||||
|
Contributed by @Linicks.
|
||||||
|
|
||||||
|
### 3rd Party Library Compatibility
|
||||||
|
|
||||||
|
* Cheshire `5.8.x`
|
||||||
|
* clj-time `0.15.1`
|
||||||
|
* ring-core `0.15.1`
|
||||||
|
* Ragtime `0.7.x`.
|
||||||
|
|
||||||
|
### URI Connection Usability Improvement
|
||||||
|
|
||||||
|
URIs that don't specify a database will now be rejected as invalid.
|
||||||
|
|
||||||
|
Contributed by Chris Broome.
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 3.0.x and 3.1.0 (September 17th, 2016)
|
||||||
|
|
||||||
|
### MongoDB Java Driver Update
|
||||||
|
|
||||||
|
MongoDB Java driver dependency has been updated to `3.3.0`.
|
||||||
|
|
||||||
|
### Cursor Hinting Option Fix
|
||||||
|
|
||||||
|
Contributed by Stijn Opheide.
|
||||||
|
|
||||||
|
### Improved DBObject to Clojure Map conversion performance
|
||||||
|
|
||||||
|
New `from-db-object` implementation for `DBObject` avoids creation of an unnecessary
|
||||||
|
sequence and instead directly accesses `DBObject` instance in reduce. This should
|
||||||
|
offer performance improvement of about 20%. A performance test can be found
|
||||||
|
at [monger.test.stress-test](https://github.com/michaelklishin/monger/blob/master/test/monger/test/stress_test.clj).
|
||||||
|
|
||||||
|
Contributed by Juho Teperi.
|
||||||
|
|
||||||
|
### Authentication Function No Longer Ignores Credentials
|
||||||
|
|
||||||
|
In some cases Monger ignored provided credentials.
|
||||||
|
|
||||||
|
Contributed by Artem Chistyakov.
|
||||||
|
|
||||||
|
### Macro Type Hint Fixes
|
||||||
|
|
||||||
|
Contributed by Andre Ambrosio Boechat.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 2.1.0 and 3.0.0
|
||||||
|
|
||||||
|
Monger 3.0 is based on the [MongoDB Java driver 3.0](https://www.mongodb.com/blog/post/introducing-30-java-driver)
|
||||||
|
and has some (relatively minor) **breaking API changes**.
|
||||||
|
|
||||||
|
### Error Handling Built Around Write Concerns
|
||||||
|
|
||||||
|
Monger no longer provides `monger.core/get-last-error`. It is no
|
||||||
|
longer needed: write concerns and exceptions is now the primary way for clients
|
||||||
|
to be notified of operation failures.
|
||||||
|
|
||||||
|
### New Authentication API
|
||||||
|
|
||||||
|
MongoDB 3.0 supports different authentication mechanisms. Multiple
|
||||||
|
credentials can be specified for a single connection. The client
|
||||||
|
and the server then can negotiate what authentication mechanism to use
|
||||||
|
and which set of credentials succeed.
|
||||||
|
|
||||||
|
Monger introduces a new namespace for credential instantiation:
|
||||||
|
`monger.credentials`. The most common function that relies on
|
||||||
|
authentication mechanism negotiation is `monger.credentials/for`:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(require '[monger.core :as mg])
|
||||||
|
(require '[monger.credentials :as mcr])
|
||||||
|
|
||||||
|
(let [creds (mcr/for "username" "db-name" "pa$$w0rd")
|
||||||
|
conn (mg/connect-with-credentials "127.0.0.1" creds)]
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
`mg/connect-with-credentials` is the most convenient function to
|
||||||
|
connect with if you plan on using authentication.
|
||||||
|
|
||||||
|
When connecting using a URI, the API hasn't changed.
|
||||||
|
|
||||||
|
### monger.search is Gone
|
||||||
|
|
||||||
|
`monger.search` is gone. MongoDB 3.0 supports search queries
|
||||||
|
using regular query operators, namely `$text`. `monger.operators` is
|
||||||
|
extended to include `$text`, `$search`, `$language`, and `$natural`.
|
||||||
|
|
||||||
|
An example of a search query in 3.0:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(require '[monger.core :as mg])
|
||||||
|
(require '[monger.credentials :as mcr])
|
||||||
|
(require '[monger.collection :as mc])
|
||||||
|
(require '[monger.operators :refer [$text $search]])
|
||||||
|
|
||||||
|
(let [creds (mcr/for "username" "db-name" "pa$$w0rd")
|
||||||
|
conn (mg/connect-with-credentials "127.0.0.1" creds)
|
||||||
|
db (mg/get-db conn "db-name")]
|
||||||
|
(mc/find-maps db "collection" {$text {$search "hello"}}))
|
||||||
|
```
|
||||||
|
|
||||||
|
### Add allow-disk-use and Cursor Options to Aggregates
|
||||||
|
|
||||||
|
`monger.collection/aggregate` now supports `:cursor` and `:allow-disk-use` options.
|
||||||
|
|
||||||
|
Contributed by Bartek Marcinowski.
|
||||||
|
|
||||||
|
|
||||||
|
### JSON Serialization of BSON Timestamps
|
||||||
|
|
||||||
|
JSON serialisation extensions now support BSON timestamps.
|
||||||
|
|
||||||
|
Contributed by Tom McMillen.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 2.0.0 and 2.1.0
|
||||||
|
|
||||||
|
### Clojure 1.7 Compatibility
|
||||||
|
|
||||||
|
Monger now compiles with Clojure 1.7.
|
||||||
|
|
||||||
|
### MongoDB Java Driver Update
|
||||||
|
|
||||||
|
MongoDB Java driver dependency has been updated to `2.13.x`.
|
||||||
|
|
||||||
|
### $each Operator
|
||||||
|
|
||||||
|
The `$each` operator now can be used via `monger.operators`.
|
||||||
|
|
||||||
|
Contributed by Juha Jokimäki.
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.8.0 and 2.0.0
|
||||||
|
|
||||||
|
`2.0` is a major release that has **breaking public API changes**.
|
||||||
|
|
||||||
|
### Explicit Connection/DB/GridFS Argument
|
||||||
|
|
||||||
|
In Monger 2.0, all key public API functions require an explicit
|
||||||
|
DB/connection/GridFS object to be provided instead of relying on
|
||||||
|
a shared dynamic var. This makes Monger much easier to use with
|
||||||
|
systems such as Component and Jig, as well as concurrent
|
||||||
|
applications that need to work with multiple connections, database,
|
||||||
|
or GridFS filesystems.
|
||||||
|
|
||||||
|
In other words, instead of
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(require '[monger.collection :as mc])
|
||||||
|
|
||||||
|
(mc/insert "libraries" {:name "Monger"})
|
||||||
|
```
|
||||||
|
|
||||||
|
it is now necessary to do
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(require '[monger.collection :as mc])
|
||||||
|
|
||||||
|
(mc/insert db "libraries" {:name "Monger"})
|
||||||
|
```
|
||||||
|
|
||||||
|
This also means that `monger.core/connect!` and
|
||||||
|
`monger.core/connect-via-uri!` were removed, as was
|
||||||
|
`monger.multi` namespaces.
|
||||||
|
|
||||||
|
To connect to MongoDB, use `monger.core/connect`:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(require '[monger.core :as mg])
|
||||||
|
|
||||||
|
(let [conn (mg/connect)])
|
||||||
|
```
|
||||||
|
|
||||||
|
or `monger.core/connect-via-uri`:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(require '[monger.core :as mg])
|
||||||
|
|
||||||
|
(let [{:keys [conn db]} (mg/connect-via-uri "mongodb://clojurewerkz/monger:monger@127.0.0.1/monger-test4")])
|
||||||
|
```
|
||||||
|
|
||||||
|
To get a database reference, use `monger.core/get-db`, which now requires a connection
|
||||||
|
object:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(require '[monger.core :as mg])
|
||||||
|
|
||||||
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")])
|
||||||
|
```
|
||||||
|
|
||||||
|
### Options as Maps
|
||||||
|
|
||||||
|
Functions that take options now require a proper Clojure map instead of
|
||||||
|
pseudo keyword arguments:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
# in Monger 1.x
|
||||||
|
(mc/update db coll {} {:score 0} :multi true)
|
||||||
|
|
||||||
|
# in Monger 2.x
|
||||||
|
(mc/update db coll {} {:score 0} {:multi true})
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.8.0-beta2 and 1.8.0
|
||||||
|
|
||||||
|
### Clojure 1.6
|
||||||
|
|
||||||
|
Monger now depends on `org.clojure/clojure` version `1.6.0`. It is
|
||||||
|
still compatible with Clojure 1.4 and if your `project.clj` depends on
|
||||||
|
a different version, it will be used, but 1.6 is the default now.
|
||||||
|
|
||||||
|
## Changes between 1.8.0-beta1 and 1.8.0-beta2
|
||||||
|
|
||||||
|
### monger.result Use with WriteConcerns is Deprecated
|
||||||
|
|
||||||
|
MongoDB Java driver 2.12.x [no longer guarantees connection affinity](https://github.com/mongodb/mongo-java-driver/releases/tag/r2.12.0-rc0) for thread pool
|
||||||
|
threads.
|
||||||
|
|
||||||
|
This means that `WriteConcern#getLastError` is no longer safe from concurrency
|
||||||
|
hazards. Therefore the use of `monger.result` functions on `WriteConcern` instances
|
||||||
|
is now **deprecated** in MongoDB Java client and Monger.
|
||||||
|
|
||||||
|
### MongoDB Java Driver Update
|
||||||
|
|
||||||
|
MongoDB Java driver dependency has been [updated to 2.12.x](https://github.com/mongodb/mongo-java-driver/releases/tag/r2.12.0-rc0).
|
||||||
|
|
||||||
|
### Default WriteConcern Change
|
||||||
|
|
||||||
|
Monger now uses [`WriteConcern/ACKNOWLEDGED`](http://api.mongodb.org/java/2.12/com/mongodb/WriteConcern.html#ACKNOWLEDGED) by default. Functionality-wise
|
||||||
|
it is the same as `WriteConcern/SAFE` in earlier versions.
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.7.0 and 1.8.0-beta1
|
||||||
|
|
||||||
|
### monger.core/connect-via-uri
|
||||||
|
|
||||||
|
`monger.core/connect-via-uri` is a version of `monger.core/connect-via-uri!`
|
||||||
|
which returns the connection instead of mutating a var.
|
||||||
|
|
||||||
|
It should be used by projects that are built from reloadable
|
||||||
|
components, together with `monger.multi.*`.
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.7.0-beta1 and 1.7.0
|
||||||
|
|
||||||
|
### MongoDB Java Driver Update
|
||||||
|
|
||||||
|
MongoDB Java driver dependency has been [updated to 2.11.3](https://github.com/mongodb/mongo-java-driver/releases/tag/r2.11.3).
|
||||||
|
|
||||||
|
### Ragtime Dependency Dropped
|
||||||
|
|
||||||
|
Ragtime is now an optional dependency: if your project uses `monger.ragtime`, you
|
||||||
|
need to add Ragtime to your own `project.clj`:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
[ragtime/ragtime.core "0.3.4"]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Validateur Dependency Dropped
|
||||||
|
|
||||||
|
[Validateur](http://clojurevalidations.info) is no longer a dependency.
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.6.0 and 1.7.0-beta1
|
||||||
|
|
||||||
|
### Fine-Tuning Cursor Options
|
||||||
|
|
||||||
|
`monger.query` DSL now provides a way to fine tune database cursor
|
||||||
|
options:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(with-collection "products"
|
||||||
|
...
|
||||||
|
(options {:notimeout true, :slaveok false}) ;; where keyword matches Bytes/QUERYOPTION_*
|
||||||
|
(options [:notimeout :slaveok])
|
||||||
|
(options com.mongodb.Bytes/QUERYOPTION_NOTIMEOUT) ;; support Java constants
|
||||||
|
(options :notimeout)
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
`monger.cursor` is a new namespace that provides the plumbing for cursor
|
||||||
|
fine tuning but should not be widely used directly.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### Joda Time Integration Improvements: LocalDate
|
||||||
|
|
||||||
|
`LocalDate` instance serialization is now supported
|
||||||
|
by Monger Joda Time integration.
|
||||||
|
|
||||||
|
Contributed by Timo Sulg.
|
||||||
|
|
||||||
|
|
||||||
|
### Clojure 1.3 Is No Longer Supported
|
||||||
|
|
||||||
|
Monger now officially supports Clojure 1.4+.
|
||||||
|
|
||||||
|
|
||||||
|
### Cheshire Upgrade
|
||||||
|
|
||||||
|
[Cheshire](https://github.com/dakrone/cheshire) dependency has been upgraded to 5.2.0
|
||||||
|
|
||||||
|
|
||||||
|
### ClojureWerkz Support Upgrade
|
||||||
|
|
||||||
|
ClojureWerkz Support dependency has been updated to `0.19.0`.
|
||||||
|
|
||||||
|
|
||||||
|
### Validateur 1.5.0
|
||||||
|
|
||||||
|
[Validateur](https://github.com/michaelklishin/validateur) dependency has been upgraded to 1.5.0.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.5.0 and 1.6.0
|
||||||
|
|
||||||
|
### monger.multi.collection
|
||||||
|
|
||||||
|
`monger.multi.collection` is a new namespace with functions that are very similar to those
|
||||||
|
in the `monger.collection` namespace but always take a database reference as an explicit argument.
|
||||||
|
|
||||||
|
They are supposed to be used in cases when relying on `monger.core/*mongodb-database*` is not
|
||||||
|
enough.
|
||||||
|
|
||||||
|
Erik Bakstad contributed most of this work.
|
||||||
|
|
||||||
|
|
||||||
|
### MongoDB Java Driver Update
|
||||||
|
|
||||||
|
MongoDB Java driver dependency has been [updated to 2.11.2](https://github.com/mongodb/mongo-java-driver/wiki/Release-Notes).
|
||||||
|
|
||||||
|
|
||||||
|
### monger.core/drop-db
|
||||||
|
|
||||||
|
`monger.core/drop-db` is a new function that drops a database by name.
|
||||||
|
|
||||||
|
|
||||||
|
### One More Cache Implementation
|
||||||
|
|
||||||
|
`monger.cache/db-aware-monger-cache-factory` will return a MongoDB-backed `clojure.core.cache`
|
||||||
|
implementation that can use any database:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(require '[monger.core :as mg])
|
||||||
|
(require '[monger.cache :as cache])
|
||||||
|
|
||||||
|
(let [db (mg/get-db "altcache")
|
||||||
|
coll "cache_entries"
|
||||||
|
c (cache/db-aware-monger-cache-factory db coll)]
|
||||||
|
(comment "This cache instance will use the altcache DB"))
|
||||||
|
```
|
||||||
|
|
||||||
|
### Ragtime changes
|
||||||
|
|
||||||
|
Bug fix: `monger.ragtime/applied-migration-ids` now returns a vector (instead of a set) in order to preserve the original creation order of the migrations.
|
||||||
|
|
||||||
|
Ragtime dependency has been updated to 0.3.3.
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.4.0 and 1.5.0
|
||||||
|
|
||||||
|
### Full Text Search Support
|
||||||
|
|
||||||
|
Full text search in MongoDB 2.4 can be used via commands but Monger 1.5 also provides
|
||||||
|
convenience functions in the `monger.search` namespace:
|
||||||
|
|
||||||
|
* `monger.search/search` for performing queries
|
||||||
|
* `monger.search/results-from` for obtaining hit documents sorted by score
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(require '[monger.collection :as mc])
|
||||||
|
(require '[monger.search :as ms])
|
||||||
|
|
||||||
|
(mc/ensure-index coll {:subject "text" :content "text"})
|
||||||
|
(mc/insert coll {:subject "hello there" :content "this should be searchable"})
|
||||||
|
(mc/insert coll {:subject "untitled" :content "this is just noize"})
|
||||||
|
|
||||||
|
(println (ms/results-from (ms/search coll "hello")))
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
### MongoDB Java Driver Update
|
||||||
|
|
||||||
|
MongoDB Java driver dependency has been [updated to 2.11.0](https://github.com/mongodb/mongo-java-driver/wiki/Release-Notes).
|
||||||
|
|
||||||
|
|
||||||
|
### New Geospatial Operators
|
||||||
|
|
||||||
|
`monger.operators` now defines a few more operators for convenience:
|
||||||
|
|
||||||
|
* `$geoWithin`
|
||||||
|
* `$geoIntersects`
|
||||||
|
* `$near`
|
||||||
|
|
||||||
|
Of course, these and any other new operators can be passed as strings (e.g. `"$near"`)
|
||||||
|
as well.
|
||||||
|
|
||||||
|
|
||||||
|
### monger.core/admin-db
|
||||||
|
|
||||||
|
`monger.core/admin-db` is a new convenience function that returns the `admin` database
|
||||||
|
reference.
|
||||||
|
|
||||||
|
### monger.command/admin-command
|
||||||
|
|
||||||
|
`monger.command/admin-command` is a new convenience function for running commands
|
||||||
|
on the `admin` database.
|
||||||
|
|
||||||
|
|
||||||
|
### monger.core/mongo-options Updates
|
||||||
|
|
||||||
|
`monger.core/mongo-options` options are now up-to-date with the most recent
|
||||||
|
MongoDB Java driver.
|
||||||
|
|
||||||
|
### Factory DSL Is Gone
|
||||||
|
|
||||||
|
Monger's factory DSL (an undocumented experimental feature) has been removed from `monger.testkit`. It did
|
||||||
|
not work as well as we expected and there are better alternatives available now.
|
||||||
|
|
||||||
|
|
||||||
|
### Clojure 1.5 By Default
|
||||||
|
|
||||||
|
Monger now depends on `org.clojure/clojure` version `1.5.1`. It is still compatible with Clojure 1.3+ and if your `project.clj` depends
|
||||||
|
on a different version, it will be used, but 1.5 is the default now.
|
||||||
|
|
||||||
|
We encourage all users to upgrade to 1.5, it is a drop-in replacement for the majority of projects out there.
|
||||||
|
|
||||||
|
### Authentication On Default Database
|
||||||
|
|
||||||
|
`monger.core/authenticate` now has a 2-arity version that will authenticate
|
||||||
|
on the default database:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(let [username "myservice"
|
||||||
|
pwd "LGo5h#B`cTRQ>28tba6u"]
|
||||||
|
(monger.core/use-db! "mydb")
|
||||||
|
;; authenticates requests for mydb
|
||||||
|
(monger.core/authenticate username (.toCharArray pwd)))
|
||||||
|
```
|
||||||
|
|
||||||
|
### ClojureWerkz Support Upgrade
|
||||||
|
|
||||||
|
ClojureWerkz Support dependency has been updated to version `0.15.0`.
|
||||||
|
This means Monger now will use Cheshire `5.0.x`.
|
||||||
|
|
||||||
|
|
||||||
|
### Explicit DBCursor Closure by monger.collection/find-maps and the like
|
||||||
|
|
||||||
|
`monger.collection/find-maps` and the like will now explicitly close DB cursors.
|
||||||
|
|
||||||
|
GH issue: 47
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.3.0 and 1.4.0
|
||||||
|
|
||||||
|
### Cheshire Upgrade
|
||||||
|
|
||||||
|
`clojurewerkz.support.json` now requires [Cheshire] `5.0`. There were some incompatible changes
|
||||||
|
in Cheshire `5.0`, see [Cheshire change log](https://github.com/dakrone/cheshire/blob/master/ChangeLog.md#changes-between-cheshire-500-and-40x).
|
||||||
|
|
||||||
|
|
||||||
|
### data.json Dependency Fixes
|
||||||
|
|
||||||
|
`monger.json` no longer requires `data.json` to be present at compile time.
|
||||||
|
|
||||||
|
|
||||||
|
### MongoDB Java Driver Update
|
||||||
|
|
||||||
|
MongoDB Java driver dependency has been updated to 2.10.0.
|
||||||
|
|
||||||
|
|
||||||
|
### ClojureWerkz Support Upgrade
|
||||||
|
|
||||||
|
ClojureWerkz Support dependency has been updated to version `0.9.0`.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.2.0 and 1.3.0
|
||||||
|
|
||||||
|
### monger.core/disconnect!
|
||||||
|
|
||||||
|
`monger.core/disconnect!` closes the default database connection.
|
||||||
|
|
||||||
|
|
||||||
|
### Ragtime 0.3.0
|
||||||
|
|
||||||
|
Ragtime dependency has been updated to 0.3.0.
|
||||||
|
|
||||||
|
|
||||||
|
### MongoDB Java Driver Update
|
||||||
|
|
||||||
|
MongoDB Java driver dependency has been updated to 2.9.2.
|
||||||
|
|
||||||
|
|
||||||
|
### Cheshire Support
|
||||||
|
|
||||||
|
`monger.json` and `monger.joda-time` will now use [Cheshire](https://github.com/dakrone/cheshire) if it is available. [clojure.data.json](https://github.com/clojure/data.json)
|
||||||
|
is no longer a hard dependency (but still supported if available).
|
||||||
|
|
||||||
|
Because `clojure.data.json` is no longer a hard Monger dependency, you need to either add it as explicit
|
||||||
|
dependency to your project or switch to Cheshire.
|
||||||
|
|
||||||
|
To switch to Cheshire (you may need to update your code that uses `clojure.data.json` directly!),
|
||||||
|
add the following to your `:dependencies` list:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
[cheshire "4.0.3"]
|
||||||
|
```
|
||||||
|
|
||||||
|
For `clojure.data.json` version `0.2.0`:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
[org.clojure/data.json "0.2.0"]
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
### ClojureWerkz Support 0.7.0
|
||||||
|
|
||||||
|
ClojureWerkz Support dependency has been updated to version `0.7.0`.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.1.0 and 1.2.0
|
||||||
|
|
||||||
|
### Clojure 1.4 By Default
|
||||||
|
|
||||||
|
Monger now depends on `org.clojure/clojure` version `1.4.0`. It is still compatible with Clojure 1.3 and if your `project.clj` depends
|
||||||
|
on 1.3, it will be used, but 1.4 is the default now.
|
||||||
|
|
||||||
|
We encourage all users to upgrade to 1.4, it is a drop-in replacement for the majority of projects out there.
|
||||||
|
|
||||||
|
|
||||||
|
### monger.joda-time no longer requires clojure.data.json
|
||||||
|
|
||||||
|
`monger.joda-time` no longer requires `clojure.data.json`. If `clojure.data.json` is available, it will be loaded
|
||||||
|
and extended. If not, `monger.joda-time` will only extend Clojure reader and BSON dates serialization/deserialization.
|
||||||
|
|
||||||
|
|
||||||
|
### MongoDB Java driver 2.9.0
|
||||||
|
|
||||||
|
MongoDB Java driver dependency has been updated to 2.9.0.
|
||||||
|
|
||||||
|
|
||||||
|
### Eliminated Reflection Warnings in monger.joda-time
|
||||||
|
|
||||||
|
`monger.joda-time` functions no longer result in reflective method calls.
|
||||||
|
|
||||||
|
Contributed by [Baishampayan Ghose](https://github.com/ghoseb).
|
||||||
|
|
||||||
|
|
||||||
|
### ClojureWerkz Support 0.6.0
|
||||||
|
|
||||||
|
ClojureWerkz Support dependency has been updated to version `0.6.0`.
|
||||||
|
|
||||||
|
|
||||||
|
### Monger Query DSL now supports low level options on cursors
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(with-collection coll
|
||||||
|
(find {})
|
||||||
|
(paginate :page 1 :per-page 3)
|
||||||
|
(sort { :title 1 })
|
||||||
|
(read-preference ReadPreference/PRIMARY)
|
||||||
|
(options com.mongodb.Bytes/QUERYOPTION_NOTIMEOUT))
|
||||||
|
```
|
||||||
|
|
||||||
|
### monger.collection/insert-and-return no longer forcefully replaces existing document id
|
||||||
|
|
||||||
|
`monger.collection/insert-and-return` now preserves existing document ids, just like `monger.collection/save-and-return` does.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.1.0-rc1 and 1.1.0
|
||||||
|
|
||||||
|
No changes.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.1.0-beta2 and 1.1.0-rc1
|
||||||
|
|
||||||
|
### monger.collection/save-and-return
|
||||||
|
|
||||||
|
`monger.collection/save-and-return` is a new function that is to `monger.collection/save` what `monger.collection/insert-and-return`
|
||||||
|
is to `monger.collection/insert`. See Monger 1.1.0-beta1 changes or function documentation strings for more information.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.1.0-beta1 and 1.1.0-beta2
|
||||||
|
|
||||||
|
### Support for passing keywords as collection names
|
||||||
|
|
||||||
|
It is now possible to use Clojure keywords as collection names with `monger.collection` functions.
|
||||||
|
For example, `monger.collection/insert-and-return` that's given collection name as `:people` will
|
||||||
|
treat it as `people` (by applying [clojure.core/name](http://clojuredocs.org/clojure_core/clojure.core/name) to the argument).
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.1.0-alpha3 and 1.1.0-beta1
|
||||||
|
|
||||||
|
### monger.collection/insert-and-return
|
||||||
|
|
||||||
|
`monger.collection/insert-and-return` is a new function that solves the biggest complaint about Monger's `monger.collection/insert` behavior
|
||||||
|
from Monger 1.0 users. Because `monger.collection/insert` returns a write result and is supposed to be used with Validateur and
|
||||||
|
`monger.result/ok?` and similar functions, it is hard to retrieve object id in case it wasn't explicitly passed in.
|
||||||
|
|
||||||
|
This resulted in code that looks more or less like this:
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(let [oid (ObjectId.)
|
||||||
|
      result (merge doc {:_id oid})]
|
||||||
|
(monger.collection/insert "documents" result)
|
||||||
|
result)
|
||||||
|
```
|
||||||
|
|
||||||
|
To solve this problem, we introduce a new function, `monger.collection/insert-and-return`, that returns the exact inserted document
|
||||||
|
as an immutable Clojure map. The `:_id` key will be available on the returned map, even if it wasn't present and had to be generated.
|
||||||
|
|
||||||
|
`monger.collection/insert` behavior stays the same both because of backwards compatibility concerns and because there are valid cases
|
||||||
|
when a user may want to have the write result returned.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.1.0-alpha2 and 1.1.0-alpha3
|
||||||
|
|
||||||
|
### Clojure reader extensions
|
||||||
|
|
||||||
|
`monger.joda-time` now extends Clojure reader for Joda Time types so the new Clojure reader-based
|
||||||
|
Ring session store can store Joda dates/time values.
|
||||||
|
|
||||||
|
|
||||||
|
## Changes between 1.0.0 and 1.1.0-alpha2
|
||||||
|
|
||||||
|
### Alternative, Clojure reader-based Ring session store implementation
|
||||||
|
|
||||||
|
Monger 1.1 will have an alternative Ring session store that uses Clojure reader serialization.
|
||||||
|
|
||||||
|
This way libraries like Friend, that use namespaced keywords (like `::identity`) and other
|
||||||
|
Clojure-specific data structures will work well with Monger.
|
||||||
|
|
||||||
|
Current store will strip off namespace information from namespaced keywords
|
||||||
|
because `clojure.core/name` works that way. For example:
|
||||||
|
|
||||||
|
|
||||||
|
``` clojure
|
||||||
|
(name ::identity)
|
||||||
|
```
|
||||||
|
|
||||||
|
Reported by Julio Barros.
|
||||||
|
|
||||||
In the Monger 1.0.0 release, development dependencies were erroneously included in the jar. This affected
|
|
||||||
projects using, for example, a different version of `clojure.core.cache`. This wasn't intentional and `1.0.1` is released
|
|
||||||
to address the problem.
|
|
||||||
|
|
||||||
|
|
||||||
## Changes between 1.0.0-rc2 and 1.0.0
|
## Changes between 1.0.0-rc2 and 1.0.0
|
||||||
|
|
@ -458,7 +1139,7 @@ If you need to use `keywordize`, use 4-arity:
|
||||||
### Query DSL has a way to specify if fields need to be keywordized
|
### Query DSL has a way to specify if fields need to be keywordized
|
||||||
|
|
||||||
It is now possible to opt-out of field keywordization in the query DSL:
|
It is now possible to opt-out of field keywordization in the query DSL:
|
||||||
|
|
||||||
``` clojure
|
``` clojure
|
||||||
(with-collection coll
|
(with-collection coll
|
||||||
(find {})
|
(find {})
|
||||||
|
|
@ -484,7 +1165,7 @@ monger.collection/find-map-by-id no longer ignore fields argument. Contributed b
|
||||||
### Meet monger.db and monger.command
|
### Meet monger.db and monger.command
|
||||||
|
|
||||||
`monger.db` namespace was added to perform operations like adding users or dropping databases. Several functions from
|
`monger.db` namespace was added to perform operations like adding users or dropping databases. Several functions from
|
||||||
`monger.core` will eventually be moved there, but not for 1.0.
|
`monger.core` will eventually be moved there, but not for 1.0.
|
||||||
|
|
||||||
`monger.command` namespace includes convenience methods for issuing MongoDB commands.
|
`monger.command` namespace includes convenience methods for issuing MongoDB commands.
|
||||||
|
|
||||||
|
|
@ -500,4 +1181,3 @@ given ObjectId. `monger.collection/remove-by-id` is its counterpart for removing
|
||||||
### monger.core/get-db-names
|
### monger.core/get-db-names
|
||||||
|
|
||||||
monger.core/get-db-names returns a set of databases. Contributed by Toby Hede.
|
monger.core/get-db-names returns a set of databases. Contributed by Toby Hede.
|
||||||
|
|
||||||
|
|
|
||||||
110
README.md
110
README.md
|
|
@ -1,46 +1,43 @@
|
||||||
# Monger, a modern Clojure MongoDB Driver
|
# Monger, a modern Clojure MongoDB Driver
|
||||||
|
[](https://travis-ci.org/xingzhefeng/monger)
|
||||||
|
Monger is an idiomatic [Clojure MongoDB driver](http://clojuremongodb.info) for a more civilized age.
|
||||||
|
|
||||||
Monger is an idiomatic Clojure MongoDB driver for a more civilized age.
|
It has batteries included, offers powerful expressive query DSL,
|
||||||
|
strives to support modern MongoDB features and have the "look and feel" and
|
||||||
|
flexibility of the MongoDB shell.
|
||||||
|
|
||||||
It has batteries included, offers powerful expressive query DSL, strives to support every MongoDB 2.0+ feature and has sane defaults. Monger is built from the
|
Monger is built from for modern Clojure versions and sits on top of
|
||||||
ground up for Clojure 1.3+ and sits on top of the official MongoDB Java driver.
|
the official MongoDB Java driver.
|
||||||
|
|
||||||
|
|
||||||
## Project Goals
|
## Project Goals
|
||||||
|
|
||||||
There is one MongoDB client for Clojure that has been around since 2009. So, why create another one? Monger authors
|
There is one MongoDB client for Clojure that has been around since 2009. So, why create another one? Monger authors
|
||||||
wanted a client that will
|
wanted a client that would
|
||||||
|
|
||||||
* Support most of MongoDB 2.0+ features but only those that really matter. Grouping the way it is done today, for example, does not (it is easier to just use Map/Reduce directly).
|
* Support most of modern MongoDB features, focus on those that really matter.
|
||||||
* Be [well documented](http://clojuremongodb.info).
|
* Be [well documented](http://clojuremongodb.info).
|
||||||
* Be well tested.
|
* Be [well tested](https://github.com/michaelklishin/monger/tree/master/test/monger/test).
|
||||||
* Be maintained, do not carry technical debt from 2009 forever.
|
* Target modern Clojure versions.
|
||||||
* Target Clojure 1.3.0 and later from the ground up.
|
* Be as close to the Mongo shell query language as practical
|
||||||
* Integrate with libraries like clojure.data.json, Joda Time, [Ragtime](https://github.com/weavejester/ragtime).
|
* Integrate with libraries like Joda Time, [Cheshire](https://github.com/dakrone/cheshire), clojure.data.json, [Ragtime](https://github.com/weavejester/ragtime).
|
||||||
* Provide support for unit testing: factories/fixtures DSL, collection cleaner functions, clojure.test integration and so on.
|
|
||||||
* Support URI connections to be friendly to Heroku and other PaaS providers.
|
* Support URI connections to be friendly to Heroku and other PaaS providers.
|
||||||
* Learn from other clients like the Java and Ruby ones.
|
* Not carry technical debt from 2009 forever.
|
||||||
* Integrate usage of JavaScript files and ClojureScript (as soon as the compiler gets artifact it is possible to depend on for easy embedding).
|
* Integrate usage of JavaScript files and ClojureScript (as soon as the compiler gets artifact it is possible to depend on for easy embedding).
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Community
|
|
||||||
|
|
||||||
[Monger has a mailing list](https://groups.google.com/forum/#!forum/clojure-mongodb). Feel free to join it and ask any questions you may have.
|
|
||||||
|
|
||||||
To subscribe for announcements of releases, important changes and so on, please follow [@ClojureWerkz](https://twitter.com/#!/clojurewerkz) on Twitter.
|
|
||||||
|
|
||||||
|
|
||||||
## Project Maturity
|
## Project Maturity
|
||||||
|
|
||||||
Monger is not a young project: it will be 1 year old around July 2012, with active production use from week 1.
|
Monger is not a young project: started in July 2011, it is over 7
|
||||||
|
years old with active production use from week 1.
|
||||||
|
|
||||||
|
|
||||||
## Artifacts
|
## Artifacts
|
||||||
|
|
||||||
Monger artifacts are [released to Clojars](https://clojars.org/com.novemberain/monger). If you are using Maven, add the following repository
|
Monger artifacts are [released to
|
||||||
definition to your `pom.xml`:
|
Clojars](https://clojars.org/com.novemberain/monger). If you are using
|
||||||
|
Maven, add the following repository definition to your `pom.xml`:
|
||||||
|
|
||||||
``` xml
|
``` xml
|
||||||
<repository>
|
<repository>
|
||||||
|
|
@ -53,66 +50,61 @@ definition to your `pom.xml`:
|
||||||
|
|
||||||
With Leiningen:
|
With Leiningen:
|
||||||
|
|
||||||
[com.novemberain/monger "1.0.1"]
|
[com.novemberain/monger "3.5.0"]
|
||||||
|
|
||||||
|
|
||||||
With Maven:
|
With Maven:
|
||||||
|
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>com.novemberain</groupId>
|
<groupId>com.novemberain</groupId>
|
||||||
<artifactId>monger</artifactId>
|
<artifactId>monger</artifactId>
|
||||||
<version>1.0.1</version>
|
<version>3.5.0</version>
|
||||||
</dependency>
|
</dependency>
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Getting Started
|
## Getting Started
|
||||||
|
|
||||||
Please refer to our [Getting Started guide](http://clojuremongodb.info/articles/getting_started.html). Don't hesitate to join our [mailing list](https://groups.google.com/forum/#!forum/clojure-mongodb) and ask questions, too!
|
Please refer to our [Getting Started
|
||||||
|
guide](http://clojuremongodb.info/articles/getting_started.html). Don't
|
||||||
|
hesitate to join our [mailing
|
||||||
|
list](https://groups.google.com/forum/#!forum/clojure-mongodb) and ask
|
||||||
|
questions, too!
|
||||||
|
|
||||||
|
|
||||||
## Documentation & Examples
|
## Documentation & Examples
|
||||||
|
|
||||||
Please visit our [documentation site](http://clojuremongodb.info/). Our [test suite](https://github.com/michaelklishin/monger/tree/master/test/monger/test) also has many code examples.
|
Please see our [documentation guides site](http://clojuremongodb.info/) and [API reference](http://reference.clojuremongodb.info).
|
||||||
|
|
||||||
|
Our [test suite](https://github.com/michaelklishin/monger/tree/master/test/monger/test)
|
||||||
|
also has many code examples.
|
||||||
|
|
||||||
|
|
||||||
|
## Community
|
||||||
|
|
||||||
|
[Monger has a mailing list](https://groups.google.com/forum/#!forum/clojure-mongodb). Feel
|
||||||
|
free to join it and ask any questions you may have.
|
||||||
|
|
||||||
|
To subscribe for announcements of releases, important changes and so
|
||||||
|
on, please follow [@ClojureWerkz](https://twitter.com/#!/clojurewerkz)
|
||||||
|
on Twitter.
|
||||||
|
|
||||||
|
|
||||||
## Supported Clojure versions
|
## Supported Clojure versions
|
||||||
|
|
||||||
Monger is built from the ground up for Clojure 1.3 and up.
|
Monger requires Clojure 1.8+. The most recent
|
||||||
|
stable release is highly recommended.
|
||||||
|
|
||||||
|
|
||||||
## Continuous Integration Status
|
## Continuous Integration Status
|
||||||
|
|
||||||
[](http://travis-ci.org/michaelklishin/monger)
|
[](http://travis-ci.org/michaelklishin/monger)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
### Write Performance
|
|
||||||
|
|
||||||
Monger insert operations are efficient and have very little overhead compared to the underlying Java driver. Here
|
|
||||||
are some numbers on a MacBook Pro from fall 2010 with Core i7 and an Intel SSD drive:
|
|
||||||
|
|
||||||
```
|
|
||||||
Testing monger.test.stress
|
|
||||||
Inserting 1000 documents...
|
|
||||||
"Elapsed time: 25.699 msecs"
|
|
||||||
Inserting 10000 documents...
|
|
||||||
"Elapsed time: 135.069 msecs"
|
|
||||||
Inserting 100000 documents...
|
|
||||||
"Elapsed time: 515.969 msecs"
|
|
||||||
```
|
|
||||||
|
|
||||||
With the `SAFE` write concern, it takes roughly 0.5 second to insert 100,000 documents with Clojure 1.3.0.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Monger Is a ClojureWerkz Project
|
## Monger Is a ClojureWerkz Project
|
||||||
|
|
||||||
Monger is part of the [group of Clojure libraries known as ClojureWerkz](http://clojurewerkz.org), together with
|
Monger is part of the [group of Clojure libraries known as ClojureWerkz](http://clojurewerkz.org), together with
|
||||||
[Neocons](https://github.com/michaelklishin/neocons), [Langohr](https://github.com/michaelklishin/langohr), [Elastisch](https://github.com/clojurewerkz/elastisch), [Welle](https://github.com/michaelklishin/welle), [Quartzite](https://github.com/michaelklishin/quartzite) and several others.
|
[Cassaforte](http://clojurecassandra.info), [Langohr](http://clojurerabbitmq.info), [Elastisch](http://clojureelasticsearch.info), [Quartzite](http://clojurequartz.info) and several others.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
@ -120,7 +112,14 @@ Monger is part of the [group of Clojure libraries known as ClojureWerkz](http://
|
||||||
Monger uses [Leiningen 2](https://github.com/technomancy/leiningen/blob/master/doc/TUTORIAL.md). Make sure you have it installed and then run tests against
|
Monger uses [Leiningen 2](https://github.com/technomancy/leiningen/blob/master/doc/TUTORIAL.md). Make sure you have it installed and then run tests against
|
||||||
supported Clojure versions using
|
supported Clojure versions using
|
||||||
|
|
||||||
lein2 all test
|
./bin/ci/before_script.sh
|
||||||
|
lein all do clean, javac, test
|
||||||
|
|
||||||
|
Or, if you don't have mongodb installed, you can use docker
|
||||||
|
|
||||||
|
docker-compose up
|
||||||
|
./bin/ci/before_script_docker.sh
|
||||||
|
lein all do clean, javac, test
|
||||||
|
|
||||||
Then create a branch and make your changes on it. Once you are done with your changes and all tests pass, submit a pull request
|
Then create a branch and make your changes on it. Once you are done with your changes and all tests pass, submit a pull request
|
||||||
on Github.
|
on Github.
|
||||||
|
|
@ -129,6 +128,7 @@ on Github.
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
Copyright (C) 2011-2012 Michael S. Klishin
|
Copyright (C) 2011-2018 [Michael S. Klishin](http://twitter.com/michaelklishin), Alex Petrov, and the ClojureWerkz team.
|
||||||
|
|
||||||
Distributed under the [Eclipse Public License](http://www.eclipse.org/legal/epl-v10.html), the same as Clojure.
|
Double licensed under the [Eclipse Public License](http://www.eclipse.org/legal/epl-v10.html) (the same as Clojure) or
|
||||||
|
the [Apache Public License 2.0](http://www.apache.org/licenses/LICENSE-2.0.html).
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,18 @@
|
||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
|
|
||||||
|
# Check which MongoDB shell is available
|
||||||
|
if command -v mongosh >/dev/null 2>&1; then
|
||||||
|
MONGO_SHELL="mongosh"
|
||||||
|
elif command -v mongo >/dev/null 2>&1; then
|
||||||
|
MONGO_SHELL="mongo"
|
||||||
|
else
|
||||||
|
echo "Error: Neither mongo nor mongosh shell found. Please install MongoDB shell."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
# MongoDB Java driver won't run authentication twice on the same DB instance,
|
# MongoDB Java driver won't run authentication twice on the same DB instance,
|
||||||
# so we need to use multiple DBs.
|
# so we need to use multiple DBs.
|
||||||
mongo --eval 'db.addUser("clojurewerkz/monger", "monger")' monger-test
|
$MONGO_SHELL --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], mechanisms: ["SCRAM-SHA-1"], passwordDigestor: "client"})' monger-test
|
||||||
mongo --eval 'db.addUser("clojurewerkz/monger", "monger")' monger-test2
|
$MONGO_SHELL --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], mechanisms: ["SCRAM-SHA-1"], passwordDigestor: "client"})' monger-test2
|
||||||
mongo --eval 'db.addUser("clojurewerkz/monger", "monger")' monger-test3
|
$MONGO_SHELL --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], mechanisms: ["SCRAM-SHA-1"], passwordDigestor: "client"})' monger-test3
|
||||||
mongo --eval 'db.addUser("clojurewerkz/monger", "monger")' monger-test4
|
$MONGO_SHELL --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], mechanisms: ["SCRAM-SHA-1"], passwordDigestor: "client"})' monger-test4
|
||||||
18
bin/ci/before_script_docker.sh
Executable file
18
bin/ci/before_script_docker.sh
Executable file
|
|
@ -0,0 +1,18 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
# Check which MongoDB shell is available in the container
|
||||||
|
if docker exec mongo_test which mongosh >/dev/null 2>&1; then
|
||||||
|
MONGO_SHELL="mongosh"
|
||||||
|
elif docker exec mongo_test which mongo >/dev/null 2>&1; then
|
||||||
|
MONGO_SHELL="mongo"
|
||||||
|
else
|
||||||
|
echo "Error: Neither mongo nor mongosh shell found in the container."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# MongoDB Java driver won't run authentication twice on the same DB instance,
|
||||||
|
# so we need to use multiple DBs.
|
||||||
|
docker exec mongo_test $MONGO_SHELL --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], mechanisms: ["SCRAM-SHA-1"], passwordDigestor: "client"})' monger-test
|
||||||
|
docker exec mongo_test $MONGO_SHELL --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], mechanisms: ["SCRAM-SHA-1"], passwordDigestor: "client"})' monger-test2
|
||||||
|
docker exec mongo_test $MONGO_SHELL --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], mechanisms: ["SCRAM-SHA-1"], passwordDigestor: "client"})' monger-test3
|
||||||
|
docker exec mongo_test $MONGO_SHELL --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], mechanisms: ["SCRAM-SHA-1"], passwordDigestor: "client"})' monger-test4
|
||||||
11
bin/ci/before_script_server_3.6.x.sh
Executable file
11
bin/ci/before_script_server_3.6.x.sh
Executable file
|
|
@ -0,0 +1,11 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
# MongoDB seems to need some time to boot first. MK.
|
||||||
|
sleep 5
|
||||||
|
|
||||||
|
# MongoDB Java driver won't run authentication twice on the same DB instance,
|
||||||
|
# so we need to use multiple DBs.
|
||||||
|
mongo --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], passwordDigestor: "client"})' monger-test
|
||||||
|
mongo --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], passwordDigestor: "client"})' monger-test2
|
||||||
|
mongo --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], passwordDigestor: "client"})' monger-test3
|
||||||
|
mongo --eval 'db.createUser({"user": "clojurewerkz/monger", "pwd": "monger", roles: ["dbAdmin"], passwordDigestor: "client"})' monger-test4
|
||||||
8
bin/ci/install_mongodb.sh
Executable file
8
bin/ci/install_mongodb.sh
Executable file
|
|
@ -0,0 +1,8 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4
|
||||||
|
|
||||||
|
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/4.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.0.list
|
||||||
|
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y mongodb-org
|
||||||
11
docker-compose.yml
Normal file
11
docker-compose.yml
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
# Use root/example as user/password credentials
|
||||||
|
version: '3.1'
|
||||||
|
|
||||||
|
services:
|
||||||
|
|
||||||
|
mongo:
|
||||||
|
image: mongo
|
||||||
|
container_name: mongo_test
|
||||||
|
restart: always
|
||||||
|
ports:
|
||||||
|
- "27017:27017"
|
||||||
84
project.clj
84
project.clj
|
|
@ -1,54 +1,58 @@
|
||||||
(defproject com.novemberain/monger "1.0.2-SNAPSHOT"
|
(defproject com.novemberain/monger "4.0.0-SNAPSHOT"
|
||||||
:description "Monger is a Clojure MongoDB client for a more civilized age: friendly, flexible and with batteries included"
|
:description "Monger is a Clojure MongoDB client for a more civilized age: friendly, flexible and with batteries included"
|
||||||
:min-lein-version "2.0.0"
|
:url "http://clojuremongodb.info"
|
||||||
:license {:name "Eclipse Public License"}
|
:min-lein-version "2.5.1"
|
||||||
:dependencies [[org.clojure/clojure "1.3.0"]
|
:license {:name "Eclipse Public License"
|
||||||
[org.mongodb/mongo-java-driver "2.8.0"]
|
:url "http://www.eclipse.org/legal/epl-v10.html"}
|
||||||
[com.novemberain/validateur "1.1.0"]
|
:dependencies [[org.clojure/clojure "1.11.1"]
|
||||||
[clojurewerkz/support "0.5.0"]
|
[org.mongodb/mongodb-driver "3.12.11"]
|
||||||
[ragtime/ragtime.core "0.2.0"]]
|
[clojurewerkz/support "1.5.0"]]
|
||||||
:test-selectors {:default (fn [m]
|
:test-selectors {:default (fn [m]
|
||||||
(and (not (:performance m))
|
(and (not (:performance m))
|
||||||
(not (:edge-features m))))
|
(not (:edge-features m))
|
||||||
:focus :focus
|
(not (:time-consuming m))))
|
||||||
:indexing :indexing
|
:focus :focus
|
||||||
:external :external
|
:authentication :authentication
|
||||||
:cache :cache
|
:updating :updating
|
||||||
:gridfs :gridfs
|
:indexing :indexing
|
||||||
:command :command
|
:external :external
|
||||||
:performance :performance
|
:cache :cache
|
||||||
|
:gridfs :gridfs
|
||||||
|
:command :command
|
||||||
|
:integration :integration
|
||||||
|
:performance :performance
|
||||||
;; as in, edge mongodb server
|
;; as in, edge mongodb server
|
||||||
:edge-features :edge-features
|
:edge-features :edge-features
|
||||||
|
:time-consuming :time-consuming
|
||||||
:all (constantly true)}
|
:all (constantly true)}
|
||||||
:source-paths ["src/clojure"]
|
:source-paths ["src/clojure"]
|
||||||
:java-source-paths ["src/java"]
|
:java-source-paths ["src/java"]
|
||||||
:javac-options ["-target" "1.6" "-source" "1.6"]
|
:javac-options ["-target" "1.8" "-source" "1.8"]
|
||||||
:codox {:exclude [monger.internal.pagination
|
|
||||||
monger.internal.fn
|
|
||||||
;; these are not fully baked yet or have changes
|
|
||||||
;; that are not entirely backwards compatible with 1.0. MK.
|
|
||||||
monger.testkit
|
|
||||||
monger.ring.session-store]}
|
|
||||||
:mailing-list {:name "clojure-mongodb"
|
:mailing-list {:name "clojure-mongodb"
|
||||||
:archive "https://groups.google.com/group/clojure-mongodb"
|
:archive "https://groups.google.com/group/clojure-mongodb"
|
||||||
:post "clojure-mongodb@googlegroups.com"}
|
:post "clojure-mongodb@googlegroups.com"}
|
||||||
:profiles {:1.4 {:dependencies [[org.clojure/clojure "1.4.0"]]}
|
:profiles {:1.10 {:dependencies [[org.clojure/clojure "1.10.2"]]}
|
||||||
:1.5 {:dependencies [[org.clojure/clojure "1.5.0-master-SNAPSHOT"]]}
|
:1.9 {:dependencies [[org.clojure/clojure "1.9.0"]]}
|
||||||
:dev {:resource-paths ["test/resources"]
|
:dev {:resource-paths ["test/resources"]
|
||||||
:dependencies [[clj-time "0.4.2" :exclusions [org.clojure/clojure]]
|
:dependencies [[clj-time "0.15.1" :exclusions [org.clojure/clojure]]
|
||||||
[org.clojure/data.json "0.1.2" :exclusions [org.clojure/clojure]]
|
[cheshire "5.8.1" :exclusions [org.clojure/clojure]]
|
||||||
[org.clojure/tools.cli "0.2.1" :exclusions [org.clojure/clojure]]
|
[org.clojure/data.json "2.5.0" :exclusions [org.clojure/clojure]]
|
||||||
[org.clojure/core.cache "0.6.0" :exclusions [org.clojure/clojure]]
|
[org.clojure/tools.cli "0.4.1" :exclusions [org.clojure/clojure]]
|
||||||
[ring/ring-core "1.1.0"]]
|
[org.clojure/core.cache "0.7.1" :exclusions [org.clojure/clojure]]
|
||||||
:plugins [[codox "0.6.1"]]
|
[ring/ring-core "1.7.1" :exclusions [org.clojure/clojure]]
|
||||||
:codox {:sources ["src/clojure"]
|
[com.novemberain/validateur "2.6.0" :exclusions [org.clojure/clojure]]
|
||||||
:output-dir "doc/api"}}}
|
[ch.qos.logback/logback-classic "1.2.3" :exclusions [org.slf4j/slf4j-api]]
|
||||||
:aliases {"all" ["with-profile" "dev:dev,1.4:dev,1.5"]
|
[ragtime/core "0.7.2" :exclusions [org.clojure/clojure]]]
|
||||||
"ci" ["with-profile" "dev:dev,1.4"]}
|
:plugins [[lein-codox "0.10.5"]]
|
||||||
:repositories {"sonatype" {:url "http://oss.sonatype.org/content/repositories/releases"
|
:codox {:source-paths ["src/clojure"]
|
||||||
|
:namespaces [#"^monger\.(?!internal)"]}}
|
||||||
|
;; only clj-time/JodaTime available, used to test monger.joda-time w/o clojure.data.json
|
||||||
|
:dev2 {:resource-paths ["test/resources"]
|
||||||
|
:dependencies [[clj-time "0.15.2" :exclusions [org.clojure/clojure]]]}}
|
||||||
|
:aliases {"all" ["with-profile" "dev:dev,1.10:dev,1.9:dev"]}
|
||||||
|
:repositories {"sonatype" {:url "https://oss.sonatype.org/content/repositories/releases"
|
||||||
:snapshots false
|
:snapshots false
|
||||||
:releases {:checksum :fail :update :always}}
|
:releases {:checksum :fail :update :always}}
|
||||||
"sonatype-snapshots" {:url "http://oss.sonatype.org/content/repositories/snapshots"
|
"sonatype-snapshots" {:url "https://oss.sonatype.org/content/repositories/snapshots"
|
||||||
:snapshots true
|
:snapshots true
|
||||||
:releases {:checksum :fail :update :always}}}
|
:releases {:checksum :fail :update :always}}})
|
||||||
:aot [monger.conversion])
|
|
||||||
|
|
|
||||||
|
|
@ -1,12 +1,46 @@
|
||||||
(ns ^{:doc "clojure.core.cache implementation(s) on top of MongoDB.
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
|
;;
|
||||||
|
;; The APL v2.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
|
;;
|
||||||
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
(ns monger.cache
|
||||||
|
"clojure.core.cache implementation(s) on top of MongoDB.
|
||||||
|
|
||||||
Related documentation guide: http://clojuremongodb.info/articles/integration.html"
|
Related documentation guide: http://clojuremongodb.info/articles/integration.html"
|
||||||
:author "Michael S. Klishin"}
|
(:require [monger.collection :as mc :refer [find-one find-by-id find-map-by-id]]
|
||||||
monger.cache
|
[clojure.core.cache :as cache]
|
||||||
(:require [monger.collection :as mc]
|
[monger.conversion :as cnv])
|
||||||
[clojure.core.cache :as cache])
|
(:import clojure.core.cache.CacheProtocol
|
||||||
(:use monger.conversion)
|
[com.mongodb DB DBObject WriteConcern]
|
||||||
(:import [clojure.core.cache CacheProtocol]))
|
java.util.Map))
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; Implementation
|
;; Implementation
|
||||||
|
|
@ -19,36 +53,33 @@
|
||||||
;; API
|
;; API
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defrecord BasicMongerCache [collection])
|
(defrecord BasicMongerCache [db collection])
|
||||||
|
|
||||||
(extend-protocol cache/CacheProtocol
|
(extend-protocol cache/CacheProtocol
|
||||||
BasicMongerCache
|
BasicMongerCache
|
||||||
(lookup [c k]
|
(lookup [c k]
|
||||||
(:value (mc/find-map-by-id (:collection c) k)))
|
(let [m (mc/find-map-by-id (:db c) (:collection c) k)]
|
||||||
#_ (lookup [c k not-found]
|
(:value m)))
|
||||||
(if-let [doc (mc/find-map-by-id (:collection c) k)]
|
|
||||||
(:value doc)
|
|
||||||
not-found))
|
|
||||||
(has? [c k]
|
(has? [c k]
|
||||||
(not (nil? (mc/find-by-id (get c :collection) k))))
|
(not (nil? (mc/find-by-id (:db c) (:collection c) k))))
|
||||||
(hit [this k]
|
(hit [this k]
|
||||||
this)
|
this)
|
||||||
(miss [c k v]
|
(miss [c k v]
|
||||||
(mc/insert (get c :collection) {:_id k :value v})
|
(mc/insert (:db c) (:collection c) {:_id k :value v})
|
||||||
c)
|
c)
|
||||||
(evict [c k]
|
(evict [c k]
|
||||||
(mc/remove-by-id (get c :collection) k)
|
(mc/remove-by-id (:db c) (:collection c) k)
|
||||||
c)
|
c)
|
||||||
(seed [c m]
|
(seed [c m]
|
||||||
(mc/insert-batch (get c :collection) (map (fn [[k v]]
|
(mc/insert-batch (:db c) (:collection c) (map (fn [[k v]]
|
||||||
{:_id k :value v}) m))
|
{:_id k :value v}) m))
|
||||||
c))
|
c))
|
||||||
|
|
||||||
|
|
||||||
(defn basic-monger-cache-factory
|
(defn basic-monger-cache-factory
|
||||||
([]
|
([^DB db]
|
||||||
(BasicMongerCache. default-cache-collection))
|
(BasicMongerCache. db default-cache-collection))
|
||||||
([collection]
|
([^DB db collection]
|
||||||
(BasicMongerCache. collection))
|
(BasicMongerCache. db collection))
|
||||||
([collection base]
|
([^DB db collection base]
|
||||||
(cache/seed (BasicMongerCache. collection) base)))
|
(cache/seed (BasicMongerCache. db collection) base)))
|
||||||
|
|
|
||||||
|
|
@ -1,44 +1,68 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
|
;;
|
||||||
|
;; The APL v2.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; Copyright (c) 2012 Toby Hede
|
;; Copyright (c) 2012 Toby Hede
|
||||||
;; Copyright (c) 2012 Baishampayan Ghose
|
;; Copyright (c) 2012 Baishampayan Ghose
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;; you may not use this file except in compliance with the License.
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; You may obtain a copy of the License at
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;;
|
||||||
;; the terms of this license.
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; Copyright (c) 2012 Toby Hede
|
||||||
|
;; Copyright (c) 2012 Baishampayan Ghose
|
||||||
|
;;
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns ^{:doc "Provides key functionality for interaction with MongoDB: inserting, querying, updating and deleting documents, performing Aggregation Framework
|
(ns monger.collection
|
||||||
queries, creating and dropping indexes, creating collections and more.
|
"Provides key functionality for interaction with MongoDB: inserting, querying, updating and deleting documents, performing Aggregation Framework
|
||||||
|
queries, creating and dropping indexes, creating collections and more.
|
||||||
|
|
||||||
For more advanced read queries, see monger.query.
|
For more advanced read queries, see monger.query.
|
||||||
|
|
||||||
Related documentation guides:
|
Related documentation guides:
|
||||||
|
|
||||||
* http://clojuremongodb.info/articles/getting_started.html
|
* http://clojuremongodb.info/articles/getting_started.html
|
||||||
* http://clojuremongodb.info/articles/inserting.html
|
* http://clojuremongodb.info/articles/inserting.html
|
||||||
* http://clojuremongodb.info/articles/querying.html
|
* http://clojuremongodb.info/articles/querying.html
|
||||||
* http://clojuremongodb.info/articles/updating.html
|
* http://clojuremongodb.info/articles/updating.html
|
||||||
* http://clojuremongodb.info/articles/deleting.html
|
* http://clojuremongodb.info/articles/deleting.html
|
||||||
* http://clojuremongodb.info/articles/aggregation.html"}
|
* http://clojuremongodb.info/articles/aggregation.html"
|
||||||
monger.collection
|
(:refer-clojure :exclude [find remove count drop distinct empty? any? update])
|
||||||
(:refer-clojure :exclude [find remove count drop distinct empty?])
|
(:import [com.mongodb Mongo DB DBCollection WriteResult DBObject WriteConcern
|
||||||
(:import [com.mongodb Mongo DB DBCollection WriteResult DBObject WriteConcern DBCursor MapReduceCommand MapReduceCommand$OutputType]
|
DBCursor MapReduceCommand MapReduceCommand$OutputType AggregationOutput
|
||||||
|
AggregationOptions AggregationOptions$OutputMode]
|
||||||
[java.util List Map]
|
[java.util List Map]
|
||||||
|
[java.util.concurrent TimeUnit]
|
||||||
[clojure.lang IPersistentMap ISeq]
|
[clojure.lang IPersistentMap ISeq]
|
||||||
org.bson.types.ObjectId)
|
org.bson.types.ObjectId)
|
||||||
(:require [monger core result])
|
(:require [monger.core :as mc]
|
||||||
(:use [monger.conversion]))
|
[monger.result :as mres]
|
||||||
|
[monger.conversion :refer :all]
|
||||||
;;
|
[monger.constraints :refer :all]
|
||||||
;; Implementation
|
[monger.util :refer [into-array-list]]))
|
||||||
;;
|
|
||||||
|
|
||||||
(definline check-not-nil!
|
|
||||||
[ref ^String message]
|
|
||||||
`(when (nil? ~ref)
|
|
||||||
(throw (IllegalArgumentException. ~message))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
|
|
@ -50,50 +74,50 @@
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defn ^WriteResult insert
|
(defn ^WriteResult insert
|
||||||
"Saves @document@ to @collection@. You can optionally specify WriteConcern.
|
"Saves document to collection and returns a write result monger.result/acknowledged?
|
||||||
|
and related functions operate on. You can optionally specify a WriteConcern.
|
||||||
|
|
||||||
EXAMPLES:
|
In case you need the exact inserted document returned, with the :_id key generated,
|
||||||
|
use monger.collection/insert-and-return instead."
|
||||||
(monger.collection/insert \"people\" {:name \"Joe\", :age 30})
|
([^DB db ^String coll document]
|
||||||
|
(.insert (.getCollection db (name coll))
|
||||||
(monger.collection/insert \"people\" {:name \"Joe\", :age 30, WriteConcern/SAFE})
|
|
||||||
"
|
|
||||||
([^String collection document]
|
|
||||||
(.insert (.getCollection monger.core/*mongodb-database* collection)
|
|
||||||
(to-db-object document)
|
(to-db-object document)
|
||||||
^WriteConcern monger.core/*mongodb-write-concern*))
|
^WriteConcern mc/*mongodb-write-concern*))
|
||||||
([^String collection document ^WriteConcern concern]
|
([^DB db ^String coll document ^WriteConcern concern]
|
||||||
(.insert (.getCollection monger.core/*mongodb-database* collection)
|
(.insert (.getCollection db (name coll))
|
||||||
(to-db-object document)
|
|
||||||
concern))
|
|
||||||
([^DB db ^String collection document ^WriteConcern concern]
|
|
||||||
(.insert (.getCollection db collection)
|
|
||||||
(to-db-object document)
|
(to-db-object document)
|
||||||
concern)))
|
concern)))
|
||||||
|
|
||||||
|
|
||||||
|
(defn ^clojure.lang.IPersistentMap insert-and-return
|
||||||
|
"Like monger.collection/insert but returns the inserted document as a persistent Clojure map.
|
||||||
|
|
||||||
|
If the :_id key wasn't set on the document, it will be generated and merged into the returned
|
||||||
|
map."
|
||||||
|
([^DB db ^String coll document]
|
||||||
|
(insert-and-return db coll document ^WriteConcern mc/*mongodb-write-concern*))
|
||||||
|
([^DB db ^String coll document ^WriteConcern concern]
|
||||||
|
;; MongoDB Java driver will generate the _id and set it but it
|
||||||
|
;; tries to mutate the inserted DBObject and it does not work
|
||||||
|
;; very well in our case, because that DBObject is short lived
|
||||||
|
;; and produced from the Clojure map we are passing in. Plus,
|
||||||
|
;; this approach is very awkward with immutable data structures
|
||||||
|
;; being the default. MK.
|
||||||
|
(let [doc (merge {:_id (ObjectId.)} document)]
|
||||||
|
(insert db coll doc concern)
|
||||||
|
doc)))
|
||||||
|
|
||||||
|
|
||||||
(defn ^WriteResult insert-batch
|
(defn ^WriteResult insert-batch
|
||||||
"Saves @documents@ do @collection@. You can optionally specify WriteConcern as a third argument.
|
"Saves documents to collection. You can optionally specify WriteConcern as a third argument."
|
||||||
|
([^DB db ^String coll ^List documents]
|
||||||
EXAMPLES:
|
(.insert (.getCollection db (name coll))
|
||||||
|
|
||||||
(monger.collection/insert-batch \"people\" [{:name \"Joe\", :age 30}, {:name \"Paul\", :age 27}])
|
|
||||||
|
|
||||||
(monger.collection/insert-batch \"people\" [{:name \"Joe\", :age 30}, {:name \"Paul\", :age 27}] WriteConcern/NORMAL)
|
|
||||||
|
|
||||||
"
|
|
||||||
([^String collection ^List documents]
|
|
||||||
(.insert (.getCollection monger.core/*mongodb-database* collection)
|
|
||||||
^List (to-db-object documents)
|
^List (to-db-object documents)
|
||||||
^WriteConcern monger.core/*mongodb-write-concern*))
|
^WriteConcern mc/*mongodb-write-concern*))
|
||||||
([^String collection ^List documents ^WriteConcern concern]
|
([^DB db ^String coll ^List documents ^WriteConcern concern]
|
||||||
(.insert (.getCollection monger.core/*mongodb-database* collection)
|
(.insert (.getCollection db (name coll))
|
||||||
^List (to-db-object documents)
|
^List (to-db-object documents)
|
||||||
concern))
|
concern)))
|
||||||
([^DB db ^String collection ^List documents ^WriteConcern concern]
|
|
||||||
(.insert (.getCollection db collection)
|
|
||||||
^List (to-db-object documents)
|
|
||||||
concern)))
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; monger.collection/find
|
;; monger.collection/find
|
||||||
|
|
@ -102,126 +126,83 @@
|
||||||
(defn ^DBCursor find
|
(defn ^DBCursor find
|
||||||
"Queries for objects in this collection.
|
"Queries for objects in this collection.
|
||||||
This function returns DBCursor, which allows you to iterate over DBObjects.
|
This function returns DBCursor, which allows you to iterate over DBObjects.
|
||||||
If you want to manipulate clojure sequences maps, please @find-maps@.
|
If you want to manipulate clojure sequences maps, use find-maps."
|
||||||
|
([^DB db ^String coll]
|
||||||
EXAMPLES:
|
(.find (.getCollection db (name coll))))
|
||||||
;; return all objects in this collection.
|
([^DB db ^String coll ^Map ref]
|
||||||
(mgcol/find \"people\")
|
(.find (.getCollection db (name coll))
|
||||||
|
|
||||||
;; return all objects matching query
|
|
||||||
(mgcol/find \"people\" {:company \"Comp Corp\"})
|
|
||||||
|
|
||||||
;; return all objects matching query, taking only specified fields
|
|
||||||
(mgcol/find \"people\" {:company \"Comp Corp\"} [:first_name :last_name])
|
|
||||||
"
|
|
||||||
([^String collection]
|
|
||||||
(.find (.getCollection monger.core/*mongodb-database* collection)))
|
|
||||||
([^String collection ^Map ref]
|
|
||||||
(.find (.getCollection monger.core/*mongodb-database* collection)
|
|
||||||
(to-db-object ref)))
|
(to-db-object ref)))
|
||||||
([^String collection ^Map ref fields]
|
([^DB db ^String coll ^Map ref fields]
|
||||||
(.find (.getCollection monger.core/*mongodb-database* collection)
|
(.find (.getCollection db (name coll))
|
||||||
(to-db-object ref)
|
|
||||||
(as-field-selector fields)))
|
|
||||||
([^DB db ^String collection ^Map ref fields]
|
|
||||||
(.find (.getCollection db collection)
|
|
||||||
(to-db-object ref)
|
(to-db-object ref)
|
||||||
(as-field-selector fields))))
|
(as-field-selector fields))))
|
||||||
|
|
||||||
(defn find-maps
|
(defn find-maps
|
||||||
"Queries for objects in this collection.
|
"Queries for objects in this collection.
|
||||||
This function returns clojure Seq of Maps.
|
This function returns clojure Seq of Maps.
|
||||||
If you want to work directly with DBObject, use find.
|
If you want to work directly with DBObject, use find."
|
||||||
"
|
([^DB db ^String coll]
|
||||||
([^String collection]
|
(with-open [dbc (find db coll)]
|
||||||
(map (fn [x] (from-db-object x true)) (find collection)))
|
(map (fn [x] (from-db-object x true)) dbc)))
|
||||||
([^String collection ^Map ref]
|
([^DB db ^String coll ^Map ref]
|
||||||
(map (fn [x] (from-db-object x true)) (find collection ref)))
|
(with-open [dbc (find db coll ref)]
|
||||||
([^String collection ^Map ref fields]
|
(map (fn [x] (from-db-object x true)) dbc)))
|
||||||
(map (fn [x] (from-db-object x true)) (find collection ref fields)))
|
([^DB db ^String coll ^Map ref fields]
|
||||||
([^DB db ^String collection ^Map ref fields]
|
(find-maps db coll ref fields true))
|
||||||
(map (fn [x] (from-db-object x true)) (find db collection ref fields))))
|
([^DB db ^String coll ^Map ref fields keywordize]
|
||||||
|
(with-open [dbc (find db coll ref fields)]
|
||||||
|
(map (fn [x] (from-db-object x keywordize)) dbc))))
|
||||||
|
|
||||||
(defn find-seq
|
(defn find-seq
|
||||||
"Queries for objects in this collection, returns ISeq of DBObjects."
|
"Queries for objects in this collection, returns ISeq of DBObjects."
|
||||||
([^String collection]
|
([^DB db ^String coll]
|
||||||
(seq (find collection)))
|
(with-open [dbc (find db coll)]
|
||||||
([^String collection ^Map ref]
|
(seq dbc)))
|
||||||
(seq (find collection ref)))
|
([^DB db ^String coll ^Map ref]
|
||||||
([^String collection ^Map ref fields]
|
(with-open [dbc (find db coll ref)]
|
||||||
(seq (find collection ref fields)))
|
(seq dbc)))
|
||||||
([^DB db ^String collection ^Map ref fields]
|
([^DB db ^String coll ^Map ref fields]
|
||||||
(seq (find db collection ref fields))))
|
(with-open [dbc (find db coll ref fields)]
|
||||||
|
(seq dbc))))
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; monger.collection/find-one
|
;; monger.collection/find-one
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defn ^DBObject find-one
|
(defn ^DBObject find-one
|
||||||
"Returns a single DBObject from this collection matching the query.
|
"Returns a single DBObject from this collection matching the query."
|
||||||
|
([^DB db ^String coll ^Map ref]
|
||||||
EXAMPLES:
|
(.findOne (.getCollection db (name coll))
|
||||||
|
|
||||||
(mgcol/find-one collection {:language \"Clojure\"})
|
|
||||||
|
|
||||||
;; Return only :language field.
|
|
||||||
;; Note that _id field is always returned.
|
|
||||||
(mgcol/find-one collection {:language \"Clojure\"} [:language])
|
|
||||||
|
|
||||||
"
|
|
||||||
([^String collection ^Map ref]
|
|
||||||
(.findOne (.getCollection monger.core/*mongodb-database* collection)
|
|
||||||
(to-db-object ref)))
|
(to-db-object ref)))
|
||||||
([^String collection ^Map ref fields]
|
([^DB db ^String coll ^Map ref fields]
|
||||||
(.findOne (.getCollection monger.core/*mongodb-database* collection)
|
(.findOne (.getCollection db (name coll))
|
||||||
(to-db-object ref)
|
|
||||||
^DBObject (as-field-selector fields)))
|
|
||||||
([^DB db ^String collection ^Map ref fields]
|
|
||||||
(.findOne (.getCollection db collection)
|
|
||||||
(to-db-object ref)
|
(to-db-object ref)
|
||||||
^DBObject (as-field-selector fields))))
|
^DBObject (as-field-selector fields))))
|
||||||
|
|
||||||
(defn ^IPersistentMap find-one-as-map
|
(defn ^IPersistentMap find-one-as-map
|
||||||
"Returns a single object converted to Map from this collection matching the query."
|
"Returns a single object converted to Map from this collection matching the query."
|
||||||
([^String collection ^Map ref]
|
([^DB db ^String coll ^Map ref]
|
||||||
(from-db-object ^DBObject (find-one collection ref) true))
|
(from-db-object ^DBObject (find-one db coll ref) true))
|
||||||
([^String collection ^Map ref fields]
|
([^DB db ^String coll ^Map ref fields]
|
||||||
(from-db-object ^DBObject (find-one collection ref fields) true))
|
(from-db-object ^DBObject (find-one db coll ref fields) true))
|
||||||
([^String collection ^Map ref fields keywordize]
|
([^DB db ^String coll ^Map ref fields keywordize]
|
||||||
(from-db-object ^DBObject (find-one collection ref fields) keywordize)))
|
(from-db-object ^DBObject (find-one db coll ref fields) keywordize)))
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; monger.collection/find-and-modify
|
;; monger.collection/find-and-modify
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defn ^DBObject find-and-modify
|
(defn ^IPersistentMap find-and-modify
|
||||||
"Atomically modify a document (at most one) and return it.
|
"Atomically modify a document (at most one) and return it."
|
||||||
|
([^DB db ^String coll ^Map conditions ^Map document {:keys [fields sort remove return-new upsert keywordize] :or
|
||||||
EXAMPLES:
|
{fields nil
|
||||||
|
sort nil
|
||||||
;; Find and modify a document
|
remove false
|
||||||
(mgcol/find-and-modify collection {:language \"Python\"} {:language \"Clojure\"})
|
return-new false
|
||||||
|
upsert false
|
||||||
;; If multiple documents match, choose the first one in the specified order
|
keywordize true}}]
|
||||||
(mgcol/find-and-modify collection {:language \"Python\"} {:language \"Clojure\"} :sort {:language -1})
|
(let [coll (.getCollection db (name coll))
|
||||||
|
|
||||||
;; Remove the object before returning
|
|
||||||
(mgcol/find-and-modify collection {:language \"Python\"} {} :remove true)
|
|
||||||
|
|
||||||
;; Return the modified object instead of the old one
|
|
||||||
(mgcol/find-and-modify collection {:language \"Python\"} {:language \"Clojure\"} :return-new true)
|
|
||||||
|
|
||||||
;; Retrieve a subset of fields
|
|
||||||
(mgcol/find-and-modify collection {:language \"Python\"} {:language \"Clojure\"} :fields [ :language ])
|
|
||||||
|
|
||||||
;; Create the object if it doesn't exist
|
|
||||||
(mgcol/find-and-modify collection {:language \"Factor\"} {:language \"Clojure\"} :upsert true)
|
|
||||||
|
|
||||||
"
|
|
||||||
([^String collection ^Map conditions ^Map document & {:keys [fields sort remove return-new upsert keywordize] :or
|
|
||||||
{fields nil sort nil remove false return-new false upsert false keywordize true}}]
|
|
||||||
(let [coll (.getCollection monger.core/*mongodb-database* collection)
|
|
||||||
maybe-fields (when fields (as-field-selector fields))
|
maybe-fields (when fields (as-field-selector fields))
|
||||||
maybe-sort (when sort (to-db-object sort))]
|
maybe-sort (when sort (to-db-object sort))]
|
||||||
(from-db-object
|
(from-db-object
|
||||||
|
|
@ -233,150 +214,122 @@
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defn ^DBObject find-by-id
|
(defn ^DBObject find-by-id
|
||||||
"Returns a single object with matching _id field.
|
"Returns a single object with matching _id field."
|
||||||
|
([^DB db ^String coll id]
|
||||||
EXAMPLES:
|
|
||||||
|
|
||||||
(mgcol/find-one-by-id collection (ObjectId. \"4ef45ab4744e9fd632640e2d\"))
|
|
||||||
|
|
||||||
;; Return only :language field.
|
|
||||||
;; Note that _id field is always returned.
|
|
||||||
(mgcol/find-one-by-id collection (ObjectId. \"4ef45ab4744e9fd632640e2d\") [:language])
|
|
||||||
"
|
|
||||||
([^String collection id]
|
|
||||||
(check-not-nil! id "id must not be nil")
|
(check-not-nil! id "id must not be nil")
|
||||||
(find-one collection {:_id id}))
|
(find-one db coll {:_id id}))
|
||||||
([^String collection id fields]
|
([^DB db ^String coll id fields]
|
||||||
(check-not-nil! id "id must not be nil")
|
(check-not-nil! id "id must not be nil")
|
||||||
(find-one collection {:_id id} fields))
|
(find-one db coll {:_id id} fields)))
|
||||||
([^DB db ^String collection id fields]
|
|
||||||
(check-not-nil! id "id must not be nil")
|
|
||||||
(find-one db collection {:_id id} fields)))
|
|
||||||
|
|
||||||
(defn ^IPersistentMap find-map-by-id
|
(defn ^IPersistentMap find-map-by-id
|
||||||
"Returns a single object, converted to map with matching _id field."
|
"Returns a single object, converted to map with matching _id field."
|
||||||
([^String collection id]
|
([^DB db ^String coll id]
|
||||||
(check-not-nil! id "id must not be nil")
|
(check-not-nil! id "id must not be nil")
|
||||||
(from-db-object ^DBObject (find-one-as-map collection {:_id id}) true))
|
(find-one-as-map db coll {:_id id}))
|
||||||
([^String collection id fields]
|
([^DB db ^String coll id fields]
|
||||||
(check-not-nil! id "id must not be nil")
|
(check-not-nil! id "id must not be nil")
|
||||||
(from-db-object ^DBObject (find-one-as-map collection {:_id id} fields) true))
|
(find-one-as-map db coll {:_id id} fields))
|
||||||
([^String collection id fields keywordize]
|
([^DB db ^String coll id fields keywordize]
|
||||||
(check-not-nil! id "id must not be nil")
|
(check-not-nil! id "id must not be nil")
|
||||||
(from-db-object ^DBObject (find-one-as-map collection {:_id id} fields) keywordize)))
|
(find-one-as-map db coll {:_id id} fields keywordize)))
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; monger.collection/group
|
|
||||||
;;
|
|
||||||
|
|
||||||
|
|
||||||
;; TBD
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; monger.collection/count
|
;; monger.collection/count
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defn count
|
(defn count
|
||||||
"Returns the number of documents in this collection.
|
"Returns the number of documents in this collection.
|
||||||
|
|
||||||
Takes optional conditions as an argument.
|
Takes optional conditions as an argument."
|
||||||
|
(^long [^DB db ^String coll]
|
||||||
(monger.collection/count collection)
|
(.count (.getCollection db (name coll))))
|
||||||
|
(^long [^DB db ^String coll ^Map conditions]
|
||||||
(monger.collection/count collection {:first_name \"Paul\"})"
|
(.count (.getCollection db (name coll)) (to-db-object conditions))))
|
||||||
(^long [^String collection]
|
|
||||||
(.count (.getCollection monger.core/*mongodb-database* collection)))
|
|
||||||
(^long [^String collection ^Map conditions]
|
|
||||||
(.count (.getCollection monger.core/*mongodb-database* collection) (to-db-object conditions)))
|
|
||||||
(^long [^DB db ^String collection ^Map conditions]
|
|
||||||
(.count (.getCollection db collection) (to-db-object conditions))))
|
|
||||||
|
|
||||||
(defn any?
|
(defn any?
|
||||||
"Wether the collection has any items at all, or items matching query.
|
"Whether the collection has any items at all, or items matching query."
|
||||||
|
([^DB db ^String coll]
|
||||||
EXAMPLES:
|
(> (count db coll) 0))
|
||||||
|
([^DB db ^String coll ^Map conditions]
|
||||||
;; wether the collection has any items
|
(> (count db coll conditions) 0)))
|
||||||
(mgcol/any? collection)
|
|
||||||
|
|
||||||
(mgcol/any? collection {:language \"Clojure\"}))
|
|
||||||
"
|
|
||||||
([^String collection]
|
|
||||||
(> (count collection) 0))
|
|
||||||
([^String collection ^Map conditions]
|
|
||||||
(> (count collection conditions) 0))
|
|
||||||
([^DB db ^String collection ^Map conditions]
|
|
||||||
(> (count db collection conditions) 0)))
|
|
||||||
|
|
||||||
|
|
||||||
(defn empty?
|
(defn empty?
|
||||||
"Wether the collection is empty.
|
"Whether the collection is empty."
|
||||||
|
[^DB db ^String coll]
|
||||||
EXAMPLES:
|
(= (count db coll {}) 0))
|
||||||
(mgcol/empty? \"things\")
|
|
||||||
"
|
|
||||||
([^String collection]
|
|
||||||
(= (count collection) 0))
|
|
||||||
([^DB db ^String collection]
|
|
||||||
(= (count db collection {}) 0)))
|
|
||||||
|
|
||||||
;; monger.collection/update
|
;; monger.collection/update
|
||||||
|
|
||||||
(defn ^WriteResult update
|
(defn ^WriteResult update
|
||||||
"Performs an update operation.
|
"Performs an update operation.
|
||||||
|
|
||||||
Please note that update is potentially destructive operation. It will update your document with the given set
|
Please note that update is potentially destructive operation. It updates document with the given set
|
||||||
emptying the fields not mentioned in (^Map document). In order to only change certain fields, please use
|
emptying the fields not mentioned in the new document. In order to only change certain fields, use
|
||||||
\"$set\".
|
\"$set\".
|
||||||
|
|
||||||
EXAMPLES
|
You can use all the MongoDB modifier operations ($inc, $set, $unset, $push, $pushAll, $addToSet, $pop, $pull
|
||||||
|
$pullAll, $rename, $bit) here as well.
|
||||||
(monger.collection/update \"people\" {:first_name \"Raul\"} {\"$set\" {:first_name \"Paul\"}})
|
|
||||||
|
|
||||||
You can use all the Mongodb Modifier Operations ($inc, $set, $unset, $push, $pushAll, $addToSet, $pop, $pull
|
|
||||||
$pullAll, $rename, $bit) here, as well
|
|
||||||
|
|
||||||
EXAMPLES
|
|
||||||
|
|
||||||
(monger.collection/update \"people\" {:first_name \"Paul\"} {\"$set\" {:index 1}})
|
|
||||||
(monger.collection/update \"people\" {:first_name \"Paul\"} {\"$inc\" {:index 5}})
|
|
||||||
|
|
||||||
(monger.collection/update \"people\" {:first_name \"Paul\"} {\"$unset\" {:years_on_stage 1}})
|
|
||||||
|
|
||||||
It also takes modifiers, such as :upsert and :multi.
|
|
||||||
|
|
||||||
EXAMPLES
|
|
||||||
|
|
||||||
;; add :band field to all the records found in \"people\" collection, otherwise only the first matched record
|
|
||||||
;; will be updated
|
|
||||||
(monger.collection/update \"people\" {} {\"$set\" {:band \"The Beatles\"}} :multi true)
|
|
||||||
|
|
||||||
;; inserts the record if it did not exist in the collection
|
|
||||||
(monger.collection/update \"people\" {:first_name \"Yoko\"} {:first_name \"Yoko\" :last_name \"Ono\"} :upsert true)
|
|
||||||
|
|
||||||
|
It also takes options, such as :upsert and :multi.
|
||||||
By default :upsert and :multi are false."
|
By default :upsert and :multi are false."
|
||||||
([^String collection ^Map conditions ^Map document & {:keys [upsert multi write-concern] :or {upsert false
|
([^DB db ^String coll ^Map conditions ^Map document]
|
||||||
multi false
|
(update db coll conditions document {}))
|
||||||
write-concern monger.core/*mongodb-write-concern*}}]
|
([^DB db ^String coll ^Map conditions ^Map document {:keys [upsert multi write-concern]
|
||||||
(.update (.getCollection monger.core/*mongodb-database* collection)
|
:or {upsert false
|
||||||
|
multi false
|
||||||
|
write-concern mc/*mongodb-write-concern*}}]
|
||||||
|
(.update (.getCollection db (name coll))
|
||||||
(to-db-object conditions)
|
(to-db-object conditions)
|
||||||
(to-db-object document)
|
(to-db-object document)
|
||||||
upsert
|
upsert
|
||||||
multi
|
multi
|
||||||
write-concern)))
|
write-concern)))
|
||||||
|
|
||||||
|
(defn ^WriteResult upsert
|
||||||
|
"Performs an upsert.
|
||||||
|
|
||||||
|
This is a convenience function that delegates to monger.collection/update and
|
||||||
|
sets :upsert to true.
|
||||||
|
|
||||||
|
See monger.collection/update documentation"
|
||||||
|
([^DB db ^String coll ^Map conditions ^Map document]
|
||||||
|
(upsert db coll conditions document {}))
|
||||||
|
([^DB db ^String coll ^Map conditions ^Map document {:keys [multi write-concern]
|
||||||
|
:or {multi false
|
||||||
|
write-concern mc/*mongodb-write-concern*}}]
|
||||||
|
(update db coll conditions document {:multi multi :write-concern write-concern :upsert true})))
|
||||||
|
|
||||||
(defn ^WriteResult update-by-id
|
(defn ^WriteResult update-by-id
|
||||||
"Update a document with given id"
|
"Update a document with given id"
|
||||||
[^String collection id ^Map document & {:keys [upsert write-concern] :or {upsert false
|
([^DB db ^String coll id ^Map document]
|
||||||
write-concern monger.core/*mongodb-write-concern*}}]
|
(update-by-id db coll id document {}))
|
||||||
(check-not-nil! id "id must not be nil")
|
([^DB db ^String coll id ^Map document {:keys [upsert write-concern]
|
||||||
(.update (.getCollection monger.core/*mongodb-database* collection)
|
:or {upsert false
|
||||||
(to-db-object {:_id id})
|
write-concern mc/*mongodb-write-concern*}}]
|
||||||
(to-db-object document)
|
(check-not-nil! id "id must not be nil")
|
||||||
upsert
|
(.update (.getCollection db (name coll))
|
||||||
false
|
(to-db-object {:_id id})
|
||||||
write-concern))
|
(to-db-object document)
|
||||||
|
upsert
|
||||||
|
false
|
||||||
|
write-concern)))
|
||||||
|
|
||||||
|
(defn ^WriteResult update-by-ids
|
||||||
|
"Update documents by given ids"
|
||||||
|
([^DB db ^String coll ids ^Map document]
|
||||||
|
(update-by-ids db coll ids document {}))
|
||||||
|
([^DB db ^String coll ids ^Map document {:keys [upsert write-concern]
|
||||||
|
:or {upsert false
|
||||||
|
write-concern mc/*mongodb-write-concern*}}]
|
||||||
|
(check-not-nil! (seq ids) "ids must not be nil or empty")
|
||||||
|
(.update (.getCollection db (name coll))
|
||||||
|
(to-db-object {:_id {"$in" ids}})
|
||||||
|
(to-db-object document)
|
||||||
|
upsert
|
||||||
|
true
|
||||||
|
write-concern)))
|
||||||
|
|
||||||
|
|
||||||
;; monger.collection/save
|
;; monger.collection/save
|
||||||
|
|
@ -387,76 +340,71 @@
|
||||||
If the object is not present in the database, insert operation will be performed.
|
If the object is not present in the database, insert operation will be performed.
|
||||||
If the object is already in the database, it will be updated.
|
If the object is already in the database, it will be updated.
|
||||||
|
|
||||||
EXAMPLES
|
This function returns write result. If you want to get the exact persisted document back,
|
||||||
|
use `save-and-return`."
|
||||||
(monger.collection/save \"people\" {:first_name \"Ian\" :last_name \"Gillan\"})
|
([^DB db ^String coll ^Map document]
|
||||||
"
|
(.save (.getCollection db (name coll))
|
||||||
([^String collection ^Map document]
|
(to-db-object document)
|
||||||
(.save (.getCollection monger.core/*mongodb-database* collection)
|
mc/*mongodb-write-concern*))
|
||||||
|
([^DB db ^String coll ^Map document ^WriteConcern write-concern]
|
||||||
|
(.save (.getCollection db (name coll))
|
||||||
(to-db-object document)
|
(to-db-object document)
|
||||||
monger.core/*mongodb-write-concern*))
|
|
||||||
([^String collection ^Map document ^WriteConcern write-concern]
|
|
||||||
(.save (.getCollection monger.core/*mongodb-database* collection)
|
|
||||||
document
|
|
||||||
write-concern))
|
|
||||||
([^DB db ^String collection ^Map document ^WriteConcern write-concern]
|
|
||||||
(.save (.getCollection db collection)
|
|
||||||
document
|
|
||||||
write-concern)))
|
write-concern)))
|
||||||
|
|
||||||
|
(defn ^clojure.lang.IPersistentMap save-and-return
|
||||||
|
"Saves an object to the given collection (does insert or update based on the object _id).
|
||||||
|
|
||||||
|
If the object is not present in the database, insert operation will be performed.
|
||||||
|
If the object is already in the database, it will be updated.
|
||||||
|
|
||||||
|
This function returns the exact persisted document back, including the `:_id` key in
|
||||||
|
case of an insert.
|
||||||
|
|
||||||
|
If you want to get write result back, use `save`."
|
||||||
|
([^DB db ^String coll ^Map document]
|
||||||
|
(save-and-return db coll document ^WriteConcern mc/*mongodb-write-concern*))
|
||||||
|
([^DB db ^String coll ^Map document ^WriteConcern write-concern]
|
||||||
|
;; see the comment in insert-and-return. Here we additionally need to make sure to not scrap the :_id key if
|
||||||
|
;; it is already present. MK.
|
||||||
|
(let [doc (merge {:_id (ObjectId.)} document)]
|
||||||
|
(save db coll doc write-concern)
|
||||||
|
doc)))
|
||||||
|
|
||||||
|
|
||||||
;; monger.collection/remove
|
;; monger.collection/remove
|
||||||
|
|
||||||
(defn ^WriteResult remove
|
(defn ^WriteResult remove
|
||||||
"Removes objects from the database.
|
"Removes objects from the database."
|
||||||
|
([^DB db ^String coll]
|
||||||
EXAMPLES
|
(.remove (.getCollection db (name coll)) (to-db-object {})))
|
||||||
|
([^DB db ^String coll ^Map conditions]
|
||||||
(monger.collection/remove collection) ;; Removes all documents from DB
|
(.remove (.getCollection db (name coll)) (to-db-object conditions))))
|
||||||
|
|
||||||
(monger.collection/remove collection {:language \"Clojure\"}) ;; Removes documents based on given query
|
|
||||||
|
|
||||||
"
|
|
||||||
([^String collection]
|
|
||||||
(.remove (.getCollection monger.core/*mongodb-database* collection) (to-db-object {})))
|
|
||||||
([^String collection ^Map conditions]
|
|
||||||
(.remove (.getCollection monger.core/*mongodb-database* collection) (to-db-object conditions)))
|
|
||||||
([^DB db ^String collection ^Map conditions]
|
|
||||||
(.remove (.getCollection db collection) (to-db-object conditions))))
|
|
||||||
|
|
||||||
|
|
||||||
(defn ^WriteResult remove-by-id
|
(defn ^WriteResult remove-by-id
|
||||||
"Removes a single document with given id"
|
"Removes a single document with given id"
|
||||||
([^String collection id]
|
[^DB db ^String coll id]
|
||||||
(remove-by-id monger.core/*mongodb-database* collection id))
|
(check-not-nil! id "id must not be nil")
|
||||||
([^DB db ^String collection id]
|
(let [coll (.getCollection db (name coll))]
|
||||||
(check-not-nil! id "id must not be nil")
|
(.remove coll (to-db-object {:_id id}))))
|
||||||
(let [coll (.getCollection db collection)]
|
|
||||||
(.remove coll (to-db-object {:_id id})))))
|
|
||||||
|
|
||||||
|
(defn purge-many
|
||||||
|
"Purges (removes all documents from) multiple collections. Intended
|
||||||
|
to be used in test environments."
|
||||||
|
[^DB db xs]
|
||||||
|
(doseq [coll xs]
|
||||||
|
(remove db coll)))
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; monger.collection/create-index
|
;; monger.collection/create-index
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defn create-index
|
(defn create-index
|
||||||
"Forces creation of index on a set of fields, if one does not already exists.
|
"Forces creation of index on a set of fields, if one does not already exists."
|
||||||
|
([^DB db ^String coll ^Map keys]
|
||||||
EXAMPLES
|
(.createIndex (.getCollection db (name coll)) (as-field-selector keys)))
|
||||||
|
([^DB db ^String coll ^Map keys ^Map options]
|
||||||
;; Will create an index on the \"language\" field
|
(.createIndex (.getCollection db (name coll))
|
||||||
(monger.collection/create-index collection {\"language\" 1})
|
|
||||||
(monger.collection/create-index collection {\"language\" 1} {:unique true :name \"unique_language\"})
|
|
||||||
|
|
||||||
"
|
|
||||||
([^String collection ^Map keys]
|
|
||||||
(.createIndex (.getCollection monger.core/*mongodb-database* collection) (as-field-selector keys)))
|
|
||||||
([^String collection ^Map keys options]
|
|
||||||
(.createIndex (.getCollection monger.core/*mongodb-database* collection)
|
|
||||||
(as-field-selector keys)
|
|
||||||
(to-db-object options)))
|
|
||||||
([^DB db ^String collection ^Map keys ^Map options]
|
|
||||||
(.createIndex (.getCollection db collection)
|
|
||||||
(as-field-selector keys)
|
(as-field-selector keys)
|
||||||
(to-db-object options))))
|
(to-db-object options))))
|
||||||
|
|
||||||
|
|
@ -472,25 +420,17 @@
|
||||||
Options are:
|
Options are:
|
||||||
|
|
||||||
:unique (boolean) to create a unique index
|
:unique (boolean) to create a unique index
|
||||||
:name (string) to specify a custom index name and not rely on the generated one
|
:name (string) to specify a custom index name and not rely on the generated one"
|
||||||
|
([^DB db ^String coll ^Map keys]
|
||||||
EXAMPLES
|
(.createIndex (.getCollection db (name coll)) (as-field-selector keys)))
|
||||||
|
([^DB db ^String coll ^Map keys ^Map options]
|
||||||
;; create a regular index
|
(.createIndex (.getCollection db (name coll))
|
||||||
(monger.collection/ensure-index \"documents\" {\"language\" 1})
|
|
||||||
;; create a unique index
|
|
||||||
(monger.collection/ensure-index \"pages\" {:url 1} {:unique true})
|
|
||||||
"
|
|
||||||
([^String collection ^Map keys]
|
|
||||||
(.ensureIndex (.getCollection monger.core/*mongodb-database* collection) (as-field-selector keys)))
|
|
||||||
([^String collection ^Map keys ^Map options]
|
|
||||||
(.ensureIndex (.getCollection monger.core/*mongodb-database* collection)
|
|
||||||
(as-field-selector keys)
|
(as-field-selector keys)
|
||||||
(to-db-object options)))
|
(to-db-object options)))
|
||||||
([^String collection ^Map keys ^String name ^Boolean unique?]
|
([^DB db ^String coll ^Map keys ^String index-name unique?]
|
||||||
(.ensureIndex (.getCollection monger.core/*mongodb-database* collection)
|
(.createIndex (.getCollection db (name coll))
|
||||||
(as-field-selector keys)
|
(as-field-selector keys)
|
||||||
name
|
index-name
|
||||||
unique?)))
|
unique?)))
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -499,15 +439,9 @@
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defn indexes-on
|
(defn indexes-on
|
||||||
"Return a list of the indexes for this collection.
|
"Return a list of the indexes for this collection."
|
||||||
|
[^DB db ^String coll]
|
||||||
EXAMPLES
|
(from-db-object (.getIndexInfo (.getCollection db (name coll))) true))
|
||||||
|
|
||||||
(monger.collection/indexes-on collection)
|
|
||||||
|
|
||||||
"
|
|
||||||
[^String collection]
|
|
||||||
(from-db-object (.getIndexInfo (.getCollection monger.core/*mongodb-database* collection)) true))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
|
|
@ -516,17 +450,15 @@
|
||||||
|
|
||||||
(defn drop-index
|
(defn drop-index
|
||||||
"Drops an index from this collection."
|
"Drops an index from this collection."
|
||||||
([^String collection ^String name]
|
[^DB db ^String coll idx]
|
||||||
(.dropIndex (.getCollection monger.core/*mongodb-database* collection) name))
|
(if (string? idx)
|
||||||
([^DB db ^String collection ^String name]
|
(.dropIndex (.getCollection db (name coll)) ^String idx)
|
||||||
(.dropIndex (.getCollection db collection) name)))
|
(.dropIndex (.getCollection db (name coll)) (to-db-object idx))))
|
||||||
|
|
||||||
(defn drop-indexes
|
(defn drop-indexes
|
||||||
"Drops all indixes from this collection."
|
"Drops all indixes from this collection."
|
||||||
([^String collection]
|
[^DB db ^String coll]
|
||||||
(.dropIndexes (.getCollection monger.core/*mongodb-database* collection)))
|
(.dropIndexes (.getCollection db (name coll))))
|
||||||
([^DB db ^String collection]
|
|
||||||
(.dropIndexes (.getCollection db collection))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
|
|
@ -535,49 +467,32 @@
|
||||||
|
|
||||||
|
|
||||||
(defn exists?
|
(defn exists?
|
||||||
"Checks weather collection with certain name exists.
|
"Checks whether collection with certain name exists."
|
||||||
|
([^DB db ^String coll]
|
||||||
EXAMPLE:
|
(.collectionExists db coll)))
|
||||||
|
|
||||||
(monger.collection/exists? \"coll\")
|
|
||||||
"
|
|
||||||
([^String collection]
|
|
||||||
(.collectionExists monger.core/*mongodb-database* collection))
|
|
||||||
([^DB db ^String collection]
|
|
||||||
(.collectionExists db collection)))
|
|
||||||
|
|
||||||
(defn create
|
(defn create
|
||||||
"Creates a collection with a given name and options."
|
"Creates a collection with a given name and options.
|
||||||
([^String collection ^Map options]
|
|
||||||
(.createCollection monger.core/*mongodb-database* collection (to-db-object options)))
|
Options are:
|
||||||
([^DB db ^String collection ^Map options]
|
|
||||||
(.createCollection db collection (to-db-object options))))
|
:capped (pass true to create a capped collection)
|
||||||
|
:max (number of documents)
|
||||||
|
:size (max allowed size of the collection, in bytes)"
|
||||||
|
[^DB db ^String coll ^Map options]
|
||||||
|
(.createCollection db coll (to-db-object options)))
|
||||||
|
|
||||||
(defn drop
|
(defn drop
|
||||||
"Deletes collection from database.
|
"Deletes collection from database."
|
||||||
|
[^DB db ^String coll]
|
||||||
EXAMPLE:
|
(.drop (.getCollection db (name coll))))
|
||||||
|
|
||||||
(monger.collection/drop \"collection-to-drop\")
|
|
||||||
"
|
|
||||||
([^String collection]
|
|
||||||
(.drop (.getCollection monger.core/*mongodb-database* collection)))
|
|
||||||
([^DB db ^String collection]
|
|
||||||
(.drop (.getCollection db collection))))
|
|
||||||
|
|
||||||
(defn rename
|
(defn rename
|
||||||
"Renames collection.
|
"Renames collection."
|
||||||
|
([^DB db ^String from, ^String to]
|
||||||
EXAMPLE:
|
(.rename (.getCollection db (name from)) (name to)))
|
||||||
|
([^DB db ^String from ^String to drop-target?]
|
||||||
(monger.collection/rename \"old_name\" \"new_name\")
|
(.rename (.getCollection db (name from)) (name to) drop-target?)))
|
||||||
"
|
|
||||||
([^String from, ^String to]
|
|
||||||
(.rename (.getCollection monger.core/*mongodb-database* from) to))
|
|
||||||
([^String from ^String to ^Boolean drop-target]
|
|
||||||
(.rename (.getCollection monger.core/*mongodb-database* from) to drop-target))
|
|
||||||
([^DB db ^String from ^String to ^Boolean drop-target]
|
|
||||||
(.rename (.getCollection db from) to drop-target)))
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; Map/Reduce
|
;; Map/Reduce
|
||||||
|
|
@ -585,11 +500,11 @@
|
||||||
|
|
||||||
(defn map-reduce
|
(defn map-reduce
|
||||||
"Performs a map reduce operation"
|
"Performs a map reduce operation"
|
||||||
([^String collection ^String js-mapper ^String js-reducer ^String output ^Map query]
|
([^DB db ^String coll ^String js-mapper ^String js-reducer ^String output ^Map query]
|
||||||
(let [coll (.getCollection monger.core/*mongodb-database* collection)]
|
(let [coll (.getCollection db (name coll))]
|
||||||
(.mapReduce coll js-mapper js-reducer output (to-db-object query))))
|
(.mapReduce coll js-mapper js-reducer output (to-db-object query))))
|
||||||
([^String collection ^String js-mapper ^String js-reducer ^String output ^MapReduceCommand$OutputType output-type ^Map query]
|
([^DB db ^String coll ^String js-mapper ^String js-reducer ^String output ^MapReduceCommand$OutputType output-type ^Map query]
|
||||||
(let [coll (.getCollection monger.core/*mongodb-database* collection)]
|
(let [coll (.getCollection db (name coll))]
|
||||||
(.mapReduce coll js-mapper js-reducer output output-type (to-db-object query)))))
|
(.mapReduce coll js-mapper js-reducer output output-type (to-db-object query)))))
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -599,39 +514,55 @@
|
||||||
|
|
||||||
(defn distinct
|
(defn distinct
|
||||||
"Finds distinct values for a key"
|
"Finds distinct values for a key"
|
||||||
([^String collection ^String key]
|
([^DB db ^String coll ^String key]
|
||||||
(.distinct (.getCollection monger.core/*mongodb-database* collection) ^String (to-db-object key)))
|
(.distinct (.getCollection db (name coll)) ^String (to-db-object key)))
|
||||||
([^String collection ^String key ^Map query]
|
([^DB db ^String coll ^String key ^Map query]
|
||||||
(.distinct (.getCollection monger.core/*mongodb-database* collection) ^String (to-db-object key) (to-db-object query)))
|
(.distinct (.getCollection db (name coll)) ^String (to-db-object key) (to-db-object query))))
|
||||||
([^DB db ^String collection ^String key ^Map query]
|
|
||||||
(.distinct (.getCollection db collection) ^String (to-db-object key) (to-db-object query))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; create/capped collections
|
|
||||||
;;
|
|
||||||
|
|
||||||
(defn create
|
|
||||||
"Creates a collection. Options are: :capped (pass true to create a capped collection), :max (number of documents)
|
|
||||||
and :size (max allowed size of the collection, in bytes)."
|
|
||||||
[^String collection options]
|
|
||||||
(.createCollection ^DB monger.core/*mongodb-database* collection (to-db-object options)))
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; Aggregation
|
;; Aggregation
|
||||||
;;
|
;;
|
||||||
|
|
||||||
|
(defn- build-aggregation-options
|
||||||
|
^AggregationOptions
|
||||||
|
[{:keys [^Boolean allow-disk-use cursor ^Long max-time]}]
|
||||||
|
(cond-> (AggregationOptions/builder)
|
||||||
|
allow-disk-use (.allowDiskUse allow-disk-use)
|
||||||
|
cursor (.outputMode AggregationOptions$OutputMode/CURSOR)
|
||||||
|
max-time (.maxTime max-time TimeUnit/MILLISECONDS)
|
||||||
|
(:batch-size cursor) (.batchSize (int (:batch-size cursor)))
|
||||||
|
true .build))
|
||||||
|
|
||||||
(defn aggregate
|
(defn aggregate
|
||||||
"Performs aggregation query. MongoDB 2.1/2.2+ only.
|
"Executes an aggregation query. MongoDB 2.2+ only.
|
||||||
|
Accepts the options :allow-disk-use and :cursor (a map with the :batch-size
|
||||||
|
key), as described in the MongoDB manual. Additionally, the :max-time option
|
||||||
|
is supported, for specifying a limit on the execution time of the query in
|
||||||
|
milliseconds.
|
||||||
|
|
||||||
|
:keywordize option that control if resulting map keys will be turned into keywords, default is true.
|
||||||
|
|
||||||
See http://docs.mongodb.org/manual/applications/aggregation/ to learn more."
|
See http://docs.mongodb.org/manual/applications/aggregation/ to learn more."
|
||||||
[^String collection stages]
|
[^DB db ^String coll stages & opts]
|
||||||
(let [res (monger.core/command {:aggregate collection :pipeline stages})]
|
(let [coll (.getCollection db (name coll))
|
||||||
;; this is what DBCollection#distinct does. Turning a blind eye!
|
agg-opts (build-aggregation-options opts)
|
||||||
(.throwOnError res)
|
pipe (into-array-list (to-db-object stages))
|
||||||
(map #(from-db-object % true) (.get res "result"))))
|
res (.aggregate coll pipe agg-opts)
|
||||||
|
{:keys [^Boolean keywordize]
|
||||||
|
:or {keywordize true}} opts]
|
||||||
|
(map #(from-db-object % keywordize) (iterator-seq res))))
|
||||||
|
|
||||||
|
(defn explain-aggregate
|
||||||
|
"Returns the explain plan for an aggregation query. MongoDB 2.2+ only.
|
||||||
|
|
||||||
|
See http://docs.mongodb.org/manual/applications/aggregation/ to learn more."
|
||||||
|
[^DB db ^String coll stages & opts]
|
||||||
|
(let [coll (.getCollection db (name coll))
|
||||||
|
agg-opts (build-aggregation-options opts)
|
||||||
|
pipe (into-array-list (to-db-object stages))
|
||||||
|
res (.explainAggregate coll pipe agg-opts)]
|
||||||
|
(from-db-object res true)))
|
||||||
;;
|
;;
|
||||||
;; Misc
|
;; Misc
|
||||||
;;
|
;;
|
||||||
|
|
@ -642,5 +573,5 @@
|
||||||
(defn system-collection?
|
(defn system-collection?
|
||||||
"Evaluates to true if the given collection name refers to a system collection. System collections
|
"Evaluates to true if the given collection name refers to a system collection. System collections
|
||||||
are prefixed with system. or fs. (default GridFS collection prefix)"
|
are prefixed with system. or fs. (default GridFS collection prefix)"
|
||||||
[^String collection]
|
[^String coll]
|
||||||
(re-find system-collection-pattern collection))
|
(re-find system-collection-pattern coll))
|
||||||
|
|
|
||||||
|
|
@ -1,85 +1,108 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
|
;;
|
||||||
|
;; The APL v2.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; Copyright (c) 2012 Toby Hede
|
;; Copyright (c) 2012 Toby Hede
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;; you may not use this file except in compliance with the License.
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; You may obtain a copy of the License at
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;;
|
||||||
;; the terms of this license.
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; Copyright (c) 2012 Toby Hede
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns ^{:doc "Provides convenience functions for performing most commonly used MongoDB commands.
|
(ns monger.command
|
||||||
For a lower-level API that gives maximum flexibility, see `monger.core/command`. To use
|
"Provides convenience functions for performing most commonly used MongoDB commands.
|
||||||
MongoDB 2.2 Aggregation Framework, see `monger.collection/aggregate`.
|
For a lower-level API that gives maximum flexibility, see `monger.core/command`. To use
|
||||||
|
MongoDB 2.2 Aggregation Framework, see `monger.collection/aggregate`.
|
||||||
|
|
||||||
Related documentation guides:
|
Related documentation guides:
|
||||||
|
|
||||||
* http://clojuremongodb.info/articles/commands.html
|
* http://clojuremongodb.info/articles/commands.html
|
||||||
* http://clojuremongodb.info/articles/aggregation.html
|
* http://clojuremongodb.info/articles/aggregation.html
|
||||||
* http://clojuremongodb.info/articles/mapreduce.html"}
|
* http://clojuremongodb.info/articles/mapreduce.html"
|
||||||
monger.command
|
(:require monger.core
|
||||||
(:require monger.core)
|
[monger.conversion :refer :all])
|
||||||
(:use monger.conversion)
|
(:import [com.mongodb MongoClient DB DBObject]))
|
||||||
(:import com.mongodb.DB))
|
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; API
|
||||||
|
;;
|
||||||
|
|
||||||
|
(defn admin-command
|
||||||
|
"Executes a command on the admin database"
|
||||||
|
[^MongoClient conn m]
|
||||||
|
(monger.core/command (monger.core/admin-db conn) m))
|
||||||
|
|
||||||
|
(defn raw-admin-command
|
||||||
|
"Executes a command on the admin database"
|
||||||
|
[^MongoClient conn ^DBObject cmd]
|
||||||
|
(monger.core/raw-command (monger.core/admin-db conn) cmd))
|
||||||
|
|
||||||
(defn collection-stats
|
(defn collection-stats
|
||||||
([collection]
|
[^DB database collection]
|
||||||
(collection-stats monger.core/*mongodb-database* collection))
|
(monger.core/command database {:collstats collection}))
|
||||||
([^DB database collection]
|
|
||||||
(monger.core/command database { :collstats collection })))
|
|
||||||
|
|
||||||
(defn db-stats
|
(defn db-stats
|
||||||
([]
|
[^DB database]
|
||||||
(db-stats monger.core/*mongodb-database*))
|
(monger.core/command database {:dbStats 1}))
|
||||||
([^DB database]
|
|
||||||
(monger.core/command database {:dbStats 1 })))
|
|
||||||
|
|
||||||
|
|
||||||
(defn reindex-collection
|
(defn reindex-collection
|
||||||
"Forces an existing collection to be reindexed using the reindexCollection command"
|
"Forces an existing collection to be reindexed using the reindexCollection command"
|
||||||
([^String collection]
|
[^DB database ^String collection]
|
||||||
(reindex-collection monger.core/*mongodb-database* collection))
|
(monger.core/command database {:reIndex collection}))
|
||||||
([^DB database ^String collection]
|
|
||||||
(monger.core/command database { :reIndex collection })))
|
|
||||||
|
|
||||||
(defn rename-collection
|
(defn rename-collection
|
||||||
"Changes the name of an existing collection using the renameCollection command"
|
"Changes the name of an existing collection using the renameCollection command"
|
||||||
([^String from ^String to]
|
[^DB db ^String from ^String to]
|
||||||
(reindex-collection monger.core/*mongodb-database* from to))
|
(monger.core/command db (sorted-map :renameCollection from :to to)))
|
||||||
([^DB database ^String from ^String to]
|
|
||||||
(monger.core/command database { :renameCollection from :to to })))
|
|
||||||
|
|
||||||
(defn convert-to-capped
|
(defn convert-to-capped
|
||||||
"Converts an existing, non-capped collection to a capped collection using the convertToCapped command"
|
"Converts an existing, non-capped collection to a capped collection using the convertToCapped command"
|
||||||
([^String collection ^long size]
|
[^DB db ^String collection ^long size]
|
||||||
(convert-to-capped monger.core/*mongodb-database* collection size))
|
(monger.core/command db (sorted-map :convertToCapped collection :size size)))
|
||||||
([^DB database ^String collection ^long size]
|
|
||||||
(monger.core/command database {:convertToCapped collection :size size})))
|
|
||||||
|
|
||||||
(defn empty-capped
|
(defn empty-capped
|
||||||
"Removes all documents from a capped collection using the emptycapped command"
|
"Removes all documents from a capped collection using the emptycapped command"
|
||||||
([^String collection]
|
[^DB db ^String collection]
|
||||||
(empty-capped monger.core/*mongodb-database* collection))
|
(monger.core/command db {:emptycapped collection}))
|
||||||
([^DB database ^String collection]
|
|
||||||
(monger.core/command database {:emptycapped collection})))
|
|
||||||
|
|
||||||
|
|
||||||
(defn compact
|
(defn compact
|
||||||
"Rewrites and defragments a single collection using the compact command. This also forces all indexes on the collection to be rebuilt"
|
"Rewrites and defragments a single collection using the compact command. This also forces all indexes on the collection to be rebuilt"
|
||||||
([^String collection]
|
[^DB db ^String collection]
|
||||||
(compact monger.core/*mongodb-database* collection))
|
(monger.core/command db {:compact collection}))
|
||||||
([^DB database ^String collection]
|
|
||||||
(monger.core/command database {:compact collection})))
|
|
||||||
|
|
||||||
|
|
||||||
(defn server-status
|
(defn server-status
|
||||||
([]
|
[^DB db]
|
||||||
(server-status monger.core/*mongodb-database*))
|
(monger.core/command db {:serverStatus 1}))
|
||||||
([^DB database]
|
|
||||||
(monger.core/command database {:serverStatus 1 })))
|
|
||||||
|
|
||||||
|
|
||||||
(defn top
|
(defn top
|
||||||
[]
|
[^MongoClient conn]
|
||||||
(monger.core/command (monger.core/get-db "admin") {:top 1}))
|
(monger.core/command (monger.core/admin-db conn) {:top 1}))
|
||||||
|
|
|
||||||
44
src/clojure/monger/constraints.clj
Normal file
44
src/clojure/monger/constraints.clj
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
|
;;
|
||||||
|
;; The APL v2.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
|
;;
|
||||||
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
(ns monger.constraints)
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; API
|
||||||
|
;;
|
||||||
|
|
||||||
|
(definline check-not-nil!
|
||||||
|
[ref ^String message]
|
||||||
|
`(when (nil? ~ref)
|
||||||
|
(throw (IllegalArgumentException. ~message))))
|
||||||
|
|
@ -1,40 +1,53 @@
|
||||||
;; Original author is Andrew Boekhoff
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
;;
|
;;
|
||||||
|
;; The APL v2.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
;; Portions of the code are Copyright (c) 2009 Andrew Boekhoff
|
;; Portions of the code are Copyright (c) 2009 Andrew Boekhoff
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;;
|
;;
|
||||||
;; Permission is hereby granted, free of charge, to any person obtaining a copy
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
;; of this software and associated documentation files (the "Software"), to deal
|
;; you may not use this file except in compliance with the License.
|
||||||
;; in the Software without restriction, including without limitation the rights
|
;; You may obtain a copy of the License at
|
||||||
;; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
;; copies of the Software, and to permit persons to whom the Software is
|
|
||||||
;; furnished to do so, subject to the following conditions:
|
|
||||||
;;
|
;;
|
||||||
;; The above copyright notice and this permission notice shall be included in
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
;; all copies or substantial portions of the Software.
|
|
||||||
;;
|
;;
|
||||||
;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
;; See the License for the specific language governing permissions and
|
||||||
;; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
;; limitations under the License.
|
||||||
;; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
;; ----------------------------------------------------------------------------------
|
||||||
;; THE SOFTWARE.
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Portions of the code are Copyright (c) 2009 Andrew Boekhoff
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns ^{:doc "Provides functions that convert between MongoDB Java driver classes (DBObject, DBList) and Clojure
|
(ns monger.conversion
|
||||||
data structures (maps, collections). Most of the time, application developers won't need to use these
|
"Provides functions that convert between MongoDB Java driver classes (DBObject, DBList) and Clojure
|
||||||
functions directly because Monger Query DSL and many other functions convert documents to Clojure sequences and
|
data structures (maps, collections). Most of the time, application developers won't need to use these
|
||||||
maps automatically. However, this namespace is part of the public API and guaranteed to be stable between minor releases.
|
functions directly because Monger Query DSL and many other functions convert documents to Clojure sequences and
|
||||||
|
maps automatically. However, this namespace is part of the public API and guaranteed to be stable between minor releases.
|
||||||
|
|
||||||
Related documentation guides:
|
Related documentation guides:
|
||||||
|
|
||||||
* http://clojuremongodb.info/articles/inserting.html
|
* http://clojuremongodb.info/articles/inserting.html
|
||||||
* http://clojuremongodb.info/articles/querying.html"}
|
* http://clojuremongodb.info/articles/querying.html"
|
||||||
monger.conversion
|
|
||||||
(:import [com.mongodb DBObject BasicDBObject BasicDBList DBCursor]
|
(:import [com.mongodb DBObject BasicDBObject BasicDBList DBCursor]
|
||||||
[clojure.lang IPersistentMap Named Keyword Ratio]
|
[clojure.lang IPersistentMap Named Keyword Ratio]
|
||||||
[java.util List Map Date Set]
|
[java.util List Map Date Set]
|
||||||
org.bson.types.ObjectId))
|
org.bson.types.ObjectId
|
||||||
|
(org.bson.types Decimal128)))
|
||||||
|
|
||||||
(defprotocol ConvertToDBObject
|
(defprotocol ConvertToDBObject
|
||||||
(^com.mongodb.DBObject to-db-object [input] "Converts given piece of Clojure data to BasicDBObject MongoDB Java driver uses"))
|
(^com.mongodb.DBObject to-db-object [input] "Converts given piece of Clojure data to BasicDBObject MongoDB Java driver uses"))
|
||||||
|
|
@ -42,7 +55,7 @@
|
||||||
(extend-protocol ConvertToDBObject
|
(extend-protocol ConvertToDBObject
|
||||||
nil
|
nil
|
||||||
(to-db-object [input]
|
(to-db-object [input]
|
||||||
input)
|
nil)
|
||||||
|
|
||||||
String
|
String
|
||||||
(to-db-object [^String input]
|
(to-db-object [^String input]
|
||||||
|
|
@ -82,8 +95,8 @@
|
||||||
DBObject
|
DBObject
|
||||||
(to-db-object [^DBObject input] input)
|
(to-db-object [^DBObject input] input)
|
||||||
|
|
||||||
com.novemberain.monger.DBRef
|
com.mongodb.DBRef
|
||||||
(to-db-object [^com.novemberain.monger.DBRef dbref]
|
(to-db-object [^com.mongodb.DBRef dbref]
|
||||||
dbref)
|
dbref)
|
||||||
|
|
||||||
Object
|
Object
|
||||||
|
|
@ -92,55 +105,44 @@
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(declare associate-pairs)
|
|
||||||
(defprotocol ConvertFromDBObject
|
(defprotocol ConvertFromDBObject
|
||||||
(from-db-object [input keywordize] "Converts given DBObject instance to a piece of Clojure data"))
|
(from-db-object [input keywordize] "Converts given DBObject instance to a piece of Clojure data"))
|
||||||
|
|
||||||
(extend-protocol ConvertFromDBObject
|
(extend-protocol ConvertFromDBObject
|
||||||
nil
|
nil
|
||||||
(from-db-object [input keywordize] input)
|
(from-db-object [_ _] nil)
|
||||||
|
|
||||||
Object
|
Object
|
||||||
(from-db-object [input keywordize] input)
|
(from-db-object [input _] input)
|
||||||
|
|
||||||
Map
|
Decimal128
|
||||||
(from-db-object [^Map input keywordize]
|
(from-db-object [^Decimal128 input _]
|
||||||
(associate-pairs (.entrySet input) keywordize))
|
(.bigDecimalValue input))
|
||||||
|
|
||||||
List
|
List
|
||||||
(from-db-object [^List input keywordize]
|
(from-db-object [^List input keywordize]
|
||||||
(vec (map #(from-db-object % keywordize) input)))
|
(mapv #(from-db-object % keywordize) input))
|
||||||
|
|
||||||
BasicDBList
|
BasicDBList
|
||||||
(from-db-object [^BasicDBList input keywordize]
|
(from-db-object [^BasicDBList input keywordize]
|
||||||
(vec (map #(from-db-object % keywordize) input)))
|
(mapv #(from-db-object % keywordize) input))
|
||||||
|
|
||||||
com.mongodb.DBRef
|
com.mongodb.DBRef
|
||||||
(from-db-object [^com.mongodb.DBRef input keywordize]
|
(from-db-object [^com.mongodb.DBRef input _]
|
||||||
(com.novemberain.monger.DBRef. input))
|
input)
|
||||||
|
|
||||||
DBObject
|
DBObject
|
||||||
(from-db-object [^DBObject input keywordize]
|
(from-db-object [^DBObject input keywordize]
|
||||||
;; DBObject provides .toMap, but the implementation in
|
;; DBObject provides .toMap, but the implementation in
|
||||||
;; subclass GridFSFile unhelpfully throws
|
;; subclass GridFSFile unhelpfully throws
|
||||||
;; UnsupportedOperationException. This part is taken from congomongo and
|
;; UnsupportedOperationException.
|
||||||
;; may need revisiting at a later point. MK.
|
(persistent!
|
||||||
(associate-pairs (for [key-set (.keySet input)] [key-set (.get input key-set)])
|
(reduce (if keywordize
|
||||||
keywordize)))
|
(fn [m ^String k]
|
||||||
|
(assoc! m (keyword k) (from-db-object (.get input k) true)))
|
||||||
|
(fn [m ^String k]
|
||||||
(defn- associate-pairs [pairs keywordize]
|
(assoc! m k (from-db-object (.get input k) false))))
|
||||||
;; Taking the keywordize test out of the fn reduces derefs
|
(transient {}) (.keySet input)))))
|
||||||
;; dramatically, which was the main barrier to matching pure-Java
|
|
||||||
;; performance for this marshalling. Taken from congomongo. MK.
|
|
||||||
(reduce (if keywordize
|
|
||||||
(fn [m [^String k v]]
|
|
||||||
(assoc m (keyword k) (from-db-object v true)))
|
|
||||||
(fn [m [^String k v]]
|
|
||||||
(assoc m k (from-db-object v false))))
|
|
||||||
{} (reverse pairs)))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(defprotocol ConvertToObjectId
|
(defprotocol ConvertToObjectId
|
||||||
|
|
|
||||||
|
|
@ -1,28 +1,53 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; The APL v2.0:
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;;
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; ----------------------------------------------------------------------------------
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; the terms of this license.
|
;;
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns ^{:author "Michael S. Klishin"
|
(ns monger.core
|
||||||
:doc "Thin idiomatic wrapper around MongoDB Java client. monger.core includes
|
"Thin idiomatic wrapper around MongoDB Java client. monger.core includes
|
||||||
fundamental functions that perform database/replica set connection, set default write concern, default database, performing commands
|
fundamental functions that perform database/replica set connection, set default write concern, default database, performing commands
|
||||||
and so on. Most of the functionality is in other monger.* namespaces, in particular monger.collection, monger.query and monger.gridfs
|
and so on. Most of the functionality is in other monger.* namespaces, in particular monger.collection, monger.query and monger.gridfs
|
||||||
|
|
||||||
Related documentation guides:
|
Related documentation guides:
|
||||||
|
|
||||||
* http://clojuremongodb.info/articles/connecting.html
|
* http://clojuremongodb.info/articles/connecting.html
|
||||||
* http://clojuremongodb.info/articles/commands.html
|
* http://clojuremongodb.info/articles/commands.html
|
||||||
* http://clojuremongodb.info/articles/gridfs.html"}
|
* http://clojuremongodb.info/articles/gridfs.html"
|
||||||
monger.core
|
|
||||||
(:refer-clojure :exclude [count])
|
(:refer-clojure :exclude [count])
|
||||||
(:use [monger.conversion])
|
(:require [monger.conversion :refer :all]
|
||||||
(:import [com.mongodb Mongo MongoURI DB WriteConcern DBObject DBCursor Bytes MongoOptions ServerAddress MapReduceOutput]
|
[monger.util :refer [into-array-list]])
|
||||||
|
(:import [com.mongodb MongoClient MongoClientURI MongoCredential DB WriteConcern DBObject DBCursor Bytes
|
||||||
|
MongoClientOptions MongoClientOptions$Builder ServerAddress MapReduceOutput MongoException]
|
||||||
[com.mongodb.gridfs GridFS]
|
[com.mongodb.gridfs GridFS]
|
||||||
[java.util Map ArrayList]))
|
[java.util Map]))
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; Defaults
|
;; Defaults
|
||||||
|
|
@ -31,103 +56,78 @@
|
||||||
(def ^:dynamic ^String *mongodb-host* "127.0.0.1")
|
(def ^:dynamic ^String *mongodb-host* "127.0.0.1")
|
||||||
(def ^:dynamic ^long *mongodb-port* 27017)
|
(def ^:dynamic ^long *mongodb-port* 27017)
|
||||||
|
|
||||||
(declare ^:dynamic ^Mongo *mongodb-connection*)
|
(def ^:dynamic ^WriteConcern *mongodb-write-concern* WriteConcern/ACKNOWLEDGED)
|
||||||
(declare ^:dynamic ^DB *mongodb-database*)
|
|
||||||
(def ^:dynamic ^WriteConcern *mongodb-write-concern* WriteConcern/SAFE)
|
|
||||||
|
|
||||||
(declare ^:dynamic ^GridFS *mongodb-gridfs*)
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; API
|
;; API
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defn ^com.mongodb.Mongo connect
|
(defn ^MongoClient connect
|
||||||
"Connects to MongoDB. When used without arguments, connects to
|
"Connects to MongoDB. When used without arguments, connects to
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
:host (*mongodb-host* by default)
|
:host (\"127.0.0.1\" by default)
|
||||||
:port (*mongodb-port* by default)
|
:port (27017 by default)"
|
||||||
|
|
||||||
EXAMPLES
|
|
||||||
|
|
||||||
(monger.core/connect)
|
|
||||||
(monger.core/connect { :host \"db3.intranet.local\", :port 27787 })
|
|
||||||
|
|
||||||
;; Connecting to a replica set with a couple of seeds
|
|
||||||
(let [^MongoOptions opts (mg/mongo-options :threads-allowed-to-block-for-connection-multiplier 300)
|
|
||||||
seeds [[\"192.168.1.1\" 27017] [\"192.168.1.2\" 27017] [\"192.168.1.1\" 27018]]
|
|
||||||
sas (map #(apply mg/server-address %) seeds)]
|
|
||||||
(mg/connect! sas opts))
|
|
||||||
"
|
|
||||||
{:arglists '([]
|
{:arglists '([]
|
||||||
[server-address options]
|
[server-address options]
|
||||||
|
[server-address options credentials]
|
||||||
[[server-address & more] options]
|
[[server-address & more] options]
|
||||||
[{ :keys [host port uri] :or { host *mongodb-host* port *mongodb-port* }}])}
|
[{:keys [host port uri] :or { host *mongodb-host* port *mongodb-port*}}])}
|
||||||
([]
|
([]
|
||||||
(Mongo.))
|
(MongoClient.))
|
||||||
([server-address ^MongoOptions options]
|
([server-address ^MongoClientOptions options]
|
||||||
(if (coll? server-address)
|
(if (coll? server-address)
|
||||||
;; connect to a replica set
|
;; connect to a replica set
|
||||||
(let [server-list ^ArrayList (ArrayList. ^java.util.Collection server-address)]
|
(let [server-list (into-array-list server-address)]
|
||||||
(Mongo. server-list options))
|
(MongoClient. server-list options))
|
||||||
;; connect to a single instance
|
;; connect to a single instance
|
||||||
(Mongo. ^ServerAddress server-address options)))
|
(MongoClient. ^ServerAddress server-address options)))
|
||||||
|
([server-address ^MongoClientOptions options credentials]
|
||||||
|
(let [creds (into-array-list (if (coll? credentials)
|
||||||
|
credentials
|
||||||
|
[credentials]))]
|
||||||
|
(if (coll? server-address)
|
||||||
|
(let [server-list (into-array-list server-address)]
|
||||||
|
(MongoClient. server-list ^java.util.List creds options))
|
||||||
|
(MongoClient. ^ServerAddress server-address ^java.util.List creds options))))
|
||||||
([{ :keys [host port uri] :or { host *mongodb-host* port *mongodb-port* }}]
|
([{ :keys [host port uri] :or { host *mongodb-host* port *mongodb-port* }}]
|
||||||
(Mongo. ^String host ^Long port)))
|
(if uri
|
||||||
|
(MongoClient. (MongoClientURI. uri))
|
||||||
|
(MongoClient. ^String host ^Long port))))
|
||||||
|
|
||||||
|
(defn ^MongoClient connect-with-credentials
|
||||||
|
"Connect with provided credentials and default options"
|
||||||
|
([credentials]
|
||||||
|
(connect-with-credentials *mongodb-host* *mongodb-port* credentials))
|
||||||
|
([^String hostname credentials]
|
||||||
|
(connect-with-credentials hostname *mongodb-port* credentials))
|
||||||
|
([^String hostname ^long port credentials]
|
||||||
|
(MongoClient. (into-array-list [(ServerAddress. hostname port)])
|
||||||
|
(into-array-list (if (coll? credentials)
|
||||||
|
credentials
|
||||||
|
[credentials])))))
|
||||||
|
|
||||||
(defn get-db-names
|
(defn get-db-names
|
||||||
"Gets a list of all database names present on the server"
|
"Gets a list of all database names present on the server"
|
||||||
([]
|
[^MongoClient conn]
|
||||||
(get-db-names *mongodb-connection*))
|
(set (.getDatabaseNames conn)))
|
||||||
([^Mongo connection]
|
|
||||||
(set (.getDatabaseNames connection))))
|
|
||||||
|
|
||||||
|
|
||||||
(defn ^com.mongodb.DB get-db
|
(defn ^DB get-db
|
||||||
"Get database reference by name.
|
"Get database reference by name."
|
||||||
|
[^MongoClient conn ^String name]
|
||||||
|
(.getDB conn name))
|
||||||
|
|
||||||
EXAMPLES
|
(defn drop-db
|
||||||
|
"Drops a database"
|
||||||
(monger.core/get-db \"myapp_production\")
|
[^MongoClient conn ^String db]
|
||||||
(monger.core/get-db connection \"myapp_production\")"
|
(.dropDatabase conn db))
|
||||||
([]
|
|
||||||
*mongodb-database*)
|
|
||||||
([^String name]
|
|
||||||
(.getDB *mongodb-connection* name))
|
|
||||||
([^Mongo connection ^String name]
|
|
||||||
(.getDB connection name)))
|
|
||||||
|
|
||||||
(defn ^com.mongodb.DB current-db
|
|
||||||
"Returns currently used database"
|
|
||||||
[]
|
|
||||||
*mongodb-database*)
|
|
||||||
|
|
||||||
(defn authenticate
|
|
||||||
([^String db ^String username ^chars password]
|
|
||||||
(authenticate *mongodb-connection* db username password))
|
|
||||||
([^Mongo connection ^String db ^String username ^chars password]
|
|
||||||
(.authenticate (.getDB connection db) username password)))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(defmacro with-connection
|
|
||||||
[conn & body]
|
|
||||||
`(binding [*mongodb-connection* ~conn]
|
|
||||||
(do ~@body)))
|
|
||||||
|
|
||||||
|
|
||||||
(defmacro with-db
|
|
||||||
[db & body]
|
|
||||||
`(binding [*mongodb-database* ~db]
|
|
||||||
(do ~@body)))
|
|
||||||
|
|
||||||
(defmacro with-gridfs
|
|
||||||
[fs & body]
|
|
||||||
`(binding [*mongodb-gridfs* ~fs]
|
|
||||||
(do ~@body)))
|
|
||||||
|
|
||||||
|
(defn ^GridFS get-gridfs
|
||||||
|
"Get GridFS for the given database."
|
||||||
|
[^MongoClient conn ^String name]
|
||||||
|
(GridFS. (.getDB conn name)))
|
||||||
|
|
||||||
(defn server-address
|
(defn server-address
|
||||||
([^String hostname]
|
([^String hostname]
|
||||||
|
|
@ -135,158 +135,165 @@
|
||||||
([^String hostname ^Long port]
|
([^String hostname ^Long port]
|
||||||
(ServerAddress. hostname port)))
|
(ServerAddress. hostname port)))
|
||||||
|
|
||||||
|
(defn ^MongoClientOptions$Builder mongo-options-builder
|
||||||
(defn mongo-options
|
[{:keys [add-cluster-listener add-cluster-listeners add-command-listener add-command-listeners
|
||||||
[& { :keys [connections-per-host threads-allowed-to-block-for-connection-multiplier
|
add-connection-pool-listener add-connection-pool-listeners add-server-listener add-server-listeners
|
||||||
max-wait-time connect-timeout socket-timeout socket-keep-alive auto-connect-retry max-auto-connect-retry-time
|
add-server-monitor-listener add-server-monitor-listeners always-use-mbeans application-name
|
||||||
safe w w-timeout fsync j] :or [auto-connect-retry true] }]
|
codec-registry compressor-list connect-timeout connections-per-host cursor-finalizer-enabled
|
||||||
(let [mo (MongoOptions.)]
|
db-decoder-factory db-encoder-factory description heartbeat-connect-timeout heartbeat-frequency
|
||||||
|
heartbeat-socket-timeout local-threshold max-connection-idle-time max-connection-life-time
|
||||||
|
max-wait-time min-connections-per-host min-heartbeat-frequency read-concern read-preference
|
||||||
|
required-replica-set-name retry-writes server-selection-timeout server-selector socket-keep-alive
|
||||||
|
socket-factory socket-timeout ssl-context ssl-enabled ssl-invalid-host-name-allowed
|
||||||
|
threads-allowed-to-block-for-connection-multiplier uuid-representation write-concern]}]
|
||||||
|
(let [mob (MongoClientOptions$Builder.)]
|
||||||
|
(when add-cluster-listener
|
||||||
|
(.addClusterListener mob add-cluster-listener))
|
||||||
|
(when add-cluster-listeners
|
||||||
|
(doseq [cluster-listener add-cluster-listeners]
|
||||||
|
(.addClusterListener mob cluster-listener)))
|
||||||
|
(when add-command-listener
|
||||||
|
(.addCommandListener mob add-command-listener))
|
||||||
|
(when add-command-listeners
|
||||||
|
(doseq [command-listener add-command-listeners]
|
||||||
|
(.addCommandListener mob command-listener)))
|
||||||
|
(when add-connection-pool-listener
|
||||||
|
(.addConnectionPoolListener mob add-connection-pool-listener))
|
||||||
|
(when add-connection-pool-listeners
|
||||||
|
(doseq [connection-pool-listener add-connection-pool-listeners]
|
||||||
|
(.addConnectionPoolListener mob connection-pool-listener)))
|
||||||
|
(when add-server-listener
|
||||||
|
(.addServerListener mob add-server-listener))
|
||||||
|
(when add-server-listeners
|
||||||
|
(doseq [server-listener add-server-listeners]
|
||||||
|
(.addServerListener mob server-listener)))
|
||||||
|
(when add-server-monitor-listener
|
||||||
|
(.addServerMonitorListener mob add-server-monitor-listener))
|
||||||
|
(when add-server-monitor-listeners
|
||||||
|
(doseq [server-monitor-listener add-server-monitor-listeners]
|
||||||
|
(.addServerMonitorListener mob server-monitor-listener)))
|
||||||
|
(when always-use-mbeans
|
||||||
|
(.alwaysUseMBeans mob always-use-mbeans))
|
||||||
|
(when application-name
|
||||||
|
(.applicationName mob application-name))
|
||||||
|
(when always-use-mbeans
|
||||||
|
(.alwaysUseMBeans mob always-use-mbeans))
|
||||||
|
(when codec-registry
|
||||||
|
(.codecRegistry mob codec-registry))
|
||||||
|
(when compressor-list
|
||||||
|
(.compressorList mob compressor-list))
|
||||||
(when connections-per-host
|
(when connections-per-host
|
||||||
(set! (. mo connectionsPerHost) connections-per-host))
|
(.connectionsPerHost mob connections-per-host))
|
||||||
(when threads-allowed-to-block-for-connection-multiplier
|
|
||||||
(set! (. mo threadsAllowedToBlockForConnectionMultiplier) threads-allowed-to-block-for-connection-multiplier))
|
|
||||||
(when max-wait-time
|
|
||||||
(set! (. mo maxWaitTime) max-wait-time))
|
|
||||||
(when connect-timeout
|
(when connect-timeout
|
||||||
(set! (. mo connectTimeout) connect-timeout))
|
(.connectTimeout mob connect-timeout))
|
||||||
(when socket-timeout
|
(when cursor-finalizer-enabled
|
||||||
(set! (. mo socketTimeout) socket-timeout))
|
(.cursorFinalizerEnabled mob cursor-finalizer-enabled))
|
||||||
|
(when db-decoder-factory
|
||||||
|
(.dbDecoderFactory mob db-decoder-factory))
|
||||||
|
(when db-encoder-factory
|
||||||
|
(.dbEncoderFactory mob db-encoder-factory))
|
||||||
|
(when description
|
||||||
|
(.description mob description))
|
||||||
|
(when heartbeat-connect-timeout
|
||||||
|
(.heartbeatConnectTimeout mob heartbeat-connect-timeout))
|
||||||
|
(when heartbeat-frequency
|
||||||
|
(.heartbeatFrequency mob heartbeat-frequency))
|
||||||
|
(when heartbeat-socket-timeout
|
||||||
|
(.heartbeatSocketTimeout mob heartbeat-socket-timeout))
|
||||||
|
(when ssl-context
|
||||||
|
(.sslContext mob ssl-context))
|
||||||
|
(when local-threshold
|
||||||
|
(.localThreshold mob local-threshold))
|
||||||
|
(when max-connection-idle-time
|
||||||
|
(.maxConnectionIdleTime mob max-connection-idle-time))
|
||||||
|
(when max-wait-time
|
||||||
|
(.maxWaitTime mob max-wait-time))
|
||||||
|
(when max-connection-life-time
|
||||||
|
(.maxConnectionLifeTime mob max-connection-life-time))
|
||||||
|
(when min-connections-per-host
|
||||||
|
(.minConnectionsPerHost mob min-connections-per-host))
|
||||||
|
(when min-heartbeat-frequency
|
||||||
|
(.minHeartbeatFrequency mob min-heartbeat-frequency))
|
||||||
|
(when read-concern
|
||||||
|
(.readConcern mob read-concern))
|
||||||
|
(when read-preference
|
||||||
|
(.readPreference mob read-preference))
|
||||||
|
(when required-replica-set-name
|
||||||
|
(.requiredReplicaSetName mob required-replica-set-name))
|
||||||
|
(when retry-writes
|
||||||
|
(.retryWrites mob retry-writes))
|
||||||
|
(when server-selection-timeout
|
||||||
|
(.serverSelectionTimeout mob server-selection-timeout))
|
||||||
|
(when server-selector
|
||||||
|
(.serverSelector mob server-selector))
|
||||||
(when socket-keep-alive
|
(when socket-keep-alive
|
||||||
(set! (. mo socketKeepAlive) socket-keep-alive))
|
(.socketKeepAlive mob socket-keep-alive))
|
||||||
(when auto-connect-retry
|
(when socket-factory
|
||||||
(set! (. mo autoConnectRetry) auto-connect-retry))
|
(.socketFactory mob socket-factory))
|
||||||
(when max-auto-connect-retry-time
|
(when socket-timeout
|
||||||
(set! (. mo maxAutoConnectRetryTime) max-auto-connect-retry-time))
|
(.socketTimeout mob socket-timeout))
|
||||||
(when safe
|
(when ssl-enabled
|
||||||
(set! (. mo safe) safe))
|
(.sslEnabled mob ssl-enabled))
|
||||||
(when w
|
(when ssl-invalid-host-name-allowed
|
||||||
(set! (. mo w) w))
|
(.sslInvalidHostNameAllowed mob ssl-invalid-host-name-allowed))
|
||||||
(when w-timeout
|
(when threads-allowed-to-block-for-connection-multiplier
|
||||||
(set! (. mo wtimeout) w-timeout))
|
(.threadsAllowedToBlockForConnectionMultiplier mob threads-allowed-to-block-for-connection-multiplier))
|
||||||
(when j
|
(when uuid-representation
|
||||||
(set! (. mo j) j))
|
(.uuidRepresentation mob uuid-representation))
|
||||||
(when fsync
|
(when write-concern
|
||||||
(set! (. mo fsync) fsync))
|
(.writeConcern mob write-concern))
|
||||||
mo))
|
mob))
|
||||||
|
|
||||||
|
(defn ^MongoClientOptions mongo-options
|
||||||
|
[opts]
|
||||||
|
(let [mob (mongo-options-builder opts)]
|
||||||
|
(.build mob)))
|
||||||
|
|
||||||
(defn set-connection!
|
(defn disconnect
|
||||||
"Sets given MongoDB connection as default by altering *mongodb-connection* var"
|
"Closes default connection to MongoDB"
|
||||||
^Mongo [^Mongo conn]
|
[^MongoClient conn]
|
||||||
(alter-var-root (var *mongodb-connection*) (constantly conn)))
|
(.close conn))
|
||||||
|
|
||||||
(defn connect!
|
(def ^:const admin-db-name "admin")
|
||||||
"Connect to MongoDB, store connection in the *mongodb-connection* var"
|
|
||||||
^Mongo [& args]
|
|
||||||
(let [c (apply connect args)]
|
|
||||||
(set-connection! c)))
|
|
||||||
|
|
||||||
|
(defn ^DB admin-db
|
||||||
|
"Returns admin database"
|
||||||
(defn set-db!
|
[^MongoClient conn]
|
||||||
"Sets *mongodb-database* var to given db, updates *mongodb-gridfs* var state. Recommended to be used for
|
(get-db conn admin-db-name))
|
||||||
applications that only use one database."
|
|
||||||
[db]
|
|
||||||
(alter-var-root (var *mongodb-database*) (constantly db))
|
|
||||||
(alter-var-root (var *mongodb-gridfs*) (constantly (GridFS. db))))
|
|
||||||
|
|
||||||
|
|
||||||
(def ^{:doc "Combines set-db! and get-db, so (use-db \"mydb\") is the same as (set-db! (get-db \"mydb\"))"}
|
|
||||||
use-db! (comp set-db! get-db))
|
|
||||||
|
|
||||||
|
|
||||||
(defn set-default-write-concern!
|
(defn set-default-write-concern!
|
||||||
[wc]
|
[wc]
|
||||||
"Set *mongodb-write-concert* var to :wc
|
"Sets *mongodb-write-concert*"
|
||||||
|
|
||||||
Unlike the official Java driver, Monger uses WriteConcern/SAFE by default. We think defaults should be safe first
|
|
||||||
and WebScale fast second."
|
|
||||||
(alter-var-root #'*mongodb-write-concern* (constantly wc)))
|
(alter-var-root #'*mongodb-write-concern* (constantly wc)))
|
||||||
|
|
||||||
|
|
||||||
(defn connect-via-uri!
|
(defn connect-via-uri
|
||||||
"Connects to MongoDB using a URI, sets up default connection and database. Commonly used for PaaS-based applications,
|
"Connects to MongoDB using a URI, returns the connection and database as a map with :conn and :db.
|
||||||
for example, running on Heroku. If username and password are provided, performs authentication."
|
Commonly used for PaaS-based applications, for example, running on Heroku.
|
||||||
[uri]
|
If username and password are provided, performs authentication."
|
||||||
(let [uri (MongoURI. uri)
|
[^String uri-string]
|
||||||
;; yes, you are not hallucinating. A class named MongoURI has a method called connectDB.
|
(let [uri (MongoClientURI. uri-string)
|
||||||
;; I call it "college OOP". Or maybe "don't give a shit" OOP.
|
conn (MongoClient. uri)]
|
||||||
db (.connectDB uri)
|
(if-let [dbName (.getDatabase uri)]
|
||||||
conn (.getMongo db)
|
{:conn conn :db (.getDB conn dbName)}
|
||||||
user (.getUsername uri)
|
(throw (IllegalArgumentException. "No database name specified in URI. Monger requires a database to be explicitly configured.")))))
|
||||||
pwd (.getPassword uri)]
|
|
||||||
;; I hope that whoever wrote the MongoDB Java driver connection/authentication parts
|
|
||||||
;; wasn't sober while at it. MK.
|
|
||||||
;;
|
|
||||||
;; First we set connection, then DB, then authentcate
|
|
||||||
(set-connection! conn)
|
|
||||||
(when (and user pwd)
|
|
||||||
(when-not (authenticate (.getName db) user pwd)
|
|
||||||
(throw (IllegalArgumentException. (format "Could not authenticate with MongoDB. Either database name or credentials are invalid. Database name: %s, username: %s" (.getName db) user)))))
|
|
||||||
;; only do this *after* we authenticated because set-db! will try to set up a default GridFS instance. MK.
|
|
||||||
(when db
|
|
||||||
(set-db! db))
|
|
||||||
conn))
|
|
||||||
|
|
||||||
|
|
||||||
(defn ^com.mongodb.CommandResult command
|
(defn ^com.mongodb.CommandResult command
|
||||||
"Runs a database command (please check MongoDB documentation for the complete list of commands). Some common commands
|
"Runs a database command (please check MongoDB documentation for the complete list of commands).
|
||||||
are:
|
|
||||||
|
|
||||||
{ :buildinfo 1 } returns version number and build information about the current MongoDB server, should be executed via admin DB.
|
Ordering of keys in the command document may matter. Please use sorted maps instead of map literals, for example:
|
||||||
|
(array-map :near 50 :test 430 :num 10)
|
||||||
|
|
||||||
{ :collstats collection-name [ :scale scale ] } returns stats about given collection.
|
For commonly used commands (distinct, count, map/reduce, etc), use monger.command and monger.collection functions such as
|
||||||
|
/distinct, /count, /drop, /dropIndexes, and /mapReduce respectively."
|
||||||
|
[^DB database ^Map cmd]
|
||||||
|
(.command ^DB database ^DBObject (to-db-object cmd)))
|
||||||
|
|
||||||
{ :dbStats 1 } returns the stats of current database
|
(defn ^com.mongodb.CommandResult raw-command
|
||||||
|
"Like monger.core/command but accepts DBObjects"
|
||||||
{ :dropDatabase 1 } deletes the current database
|
[^DB database ^DBObject cmd]
|
||||||
|
(.command database cmd))
|
||||||
{ :findAndModify find-and-modify-config } runs find, modify and return for the given query.
|
|
||||||
Takes :query, :sory, :remove, :update, :new, :fields and :upsert arguments.
|
|
||||||
Please refer MongoDB documentation for details. http://www.mongodb.org/display/DOCS/findAndModify+Command
|
|
||||||
|
|
||||||
{ :fsync config } performs a full fsync, that flushes all pending writes to database, provides an optional write lock that will make
|
|
||||||
backups easier.
|
|
||||||
Please refer MongoDB documentation for details :http://www.mongodb.org/display/DOCS/fsync+Command
|
|
||||||
|
|
||||||
{ :getLastError 1 } returns the status of the last operation on current connection.
|
|
||||||
|
|
||||||
{ :group group-config } performs grouping aggregation, docs and support for grouping are TBD in Monger.
|
|
||||||
|
|
||||||
{ :listCommands 1 } displays the list of available commands.
|
|
||||||
|
|
||||||
{ :profile new-profile-level } sets the database profiler to profile level N.
|
|
||||||
|
|
||||||
{ :reIndex coll } performs re-index on a given collection.
|
|
||||||
|
|
||||||
{ :renameCollection old-name :to new-name } renames collection from old-name to new-name
|
|
||||||
|
|
||||||
{ :repairDatabase 1 } repair and compact the current database (may be very time-consuming, depending on DB size)
|
|
||||||
|
|
||||||
Replica set commands
|
|
||||||
{ :isMaster 1 } checks if this server is a master server.
|
|
||||||
{ :replSetGetStatus 1 } get the status of a replica set.
|
|
||||||
{ :replSetInitiate replica-config } initiate a replica set with given config.
|
|
||||||
{ :replSetReconfig replica-config } set a given config for replica set.
|
|
||||||
{ :replSetStepDown seconds } manually tell a member to step down as primary. It will become primary again after specified amount of seconds.
|
|
||||||
{ :replSetFreeze seconds } freeze state of member, call with 0 to unfreeze.
|
|
||||||
{ :resync 1 } start a full resync of a replica slave
|
|
||||||
For more information, please refer Mongodb Replica Set Command guide: http://www.mongodb.org/display/DOCS/Replica+Set+Commands
|
|
||||||
|
|
||||||
{ :serverStatus 1 } gets administrative statistics about the server.
|
|
||||||
|
|
||||||
{ :shutdown 1 } shuts the MongoDB server down.
|
|
||||||
|
|
||||||
{ :top 1 } get a breakdown of usage by collection.
|
|
||||||
|
|
||||||
{ :validate namespace-name } validate the namespace (collection or index). May be very time-consuming, depending on DB size.
|
|
||||||
|
|
||||||
For :distinct, :count, :drop, :dropIndexes, :mapReduce we suggest to use monger/collection #distinct, #count, #drop, #dropIndexes, :mapReduce respectively.
|
|
||||||
"
|
|
||||||
([^Map cmd]
|
|
||||||
(.command ^DB *mongodb-database* ^DBObject (to-db-object cmd)))
|
|
||||||
([^DB database ^Map cmd]
|
|
||||||
(.command ^DB database ^DBObject (to-db-object cmd))))
|
|
||||||
|
|
||||||
(defprotocol Countable
|
(defprotocol Countable
|
||||||
(count [this] "Returns size of the object"))
|
(count [this] "Returns size of the object"))
|
||||||
|
|
@ -301,23 +308,3 @@
|
||||||
;; MongoDB Java driver could use a lot more specific type than Iterable but
|
;; MongoDB Java driver could use a lot more specific type than Iterable but
|
||||||
;; it always uses DBCollection#find to popular result set. MK.
|
;; it always uses DBCollection#find to popular result set. MK.
|
||||||
(.count ^DBCursor (.results this))))
|
(.count ^DBCursor (.results this))))
|
||||||
|
|
||||||
(defn ^DBObject get-last-error
|
|
||||||
"Returns the the error (if there is one) from the previous operation on this connection.
|
|
||||||
|
|
||||||
The result of this command looks like:
|
|
||||||
|
|
||||||
#<CommandResult { \"serverUsed\" : \"127.0.0.1:27017\" , \"n\" : 0 , \"connectionId\" : 66 , \"err\" : null , \"ok\" : 1.0}>\"
|
|
||||||
|
|
||||||
The value for err will be null if no error occurred, or a description otherwise.
|
|
||||||
|
|
||||||
Important note: when calling this method directly, it is undefined which connection \"getLastError\" is called on.
|
|
||||||
You may need to explicitly use a \"consistent Request\", see requestStart() For most purposes it is better not to call this method directly but instead use WriteConcern."
|
|
||||||
([]
|
|
||||||
(get-last-error *mongodb-database*))
|
|
||||||
([^DB database]
|
|
||||||
(.getLastError ^DB database))
|
|
||||||
([^DB database ^Integer w ^Integer wtimeout ^Boolean fsync]
|
|
||||||
(.getLastError ^DB database w wtimeout fsync))
|
|
||||||
([^DB database ^WriteConcern write-concern]
|
|
||||||
(.getLastError ^DB database write-concern)))
|
|
||||||
|
|
|
||||||
56
src/clojure/monger/credentials.clj
Normal file
56
src/clojure/monger/credentials.clj
Normal file
|
|
@ -0,0 +1,56 @@
|
||||||
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
|
;;
|
||||||
|
;; The APL v2.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
|
;;
|
||||||
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
(ns monger.credentials
|
||||||
|
"Helper functions for instantiating various types
|
||||||
|
of credentials."
|
||||||
|
(:require [clojurewerkz.support.chars :refer :all])
|
||||||
|
(:import [com.mongodb MongoCredential]))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; API
|
||||||
|
;;
|
||||||
|
|
||||||
|
(defn ^MongoCredential create
|
||||||
|
"Creates a MongoCredential instance with an unspecified mechanism.
|
||||||
|
The client will negotiate the best mechanism based on the
|
||||||
|
version of the server that the client is authenticating to."
|
||||||
|
[^String username ^String database pwd]
|
||||||
|
(MongoCredential/createCredential username database (to-char-array pwd)))
|
||||||
|
|
||||||
|
(defn ^MongoCredential x509
|
||||||
|
"Creates a MongoCredential instance for the X509-based authentication
|
||||||
|
protocol."
|
||||||
|
[^String username]
|
||||||
|
(MongoCredential/createMongoX509Credential username))
|
||||||
|
|
||||||
143
src/clojure/monger/cursor.clj
Normal file
143
src/clojure/monger/cursor.clj
Normal file
|
|
@ -0,0 +1,143 @@
|
||||||
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
|
;;
|
||||||
|
;; The APL v2.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
|
;;
|
||||||
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
(ns monger.cursor
|
||||||
|
"Helper-functions for dbCursor object:
|
||||||
|
* to initialize new cursor,
|
||||||
|
* for CRUD functionality of options of dbCursor"
|
||||||
|
(:import [com.mongodb DB DBCursor Bytes]
|
||||||
|
[java.util List Map]
|
||||||
|
[java.lang Integer]
|
||||||
|
[clojure.lang Keyword])
|
||||||
|
(:require [monger.core]
|
||||||
|
[monger.conversion :refer [to-db-object from-db-object as-field-selector]]))
|
||||||
|
|
||||||
|
(defn ^DBCursor make-db-cursor
|
||||||
|
"initializes new db-cursor."
|
||||||
|
([^DB db ^String coll]
|
||||||
|
(make-db-cursor db coll {} {}))
|
||||||
|
([^DB db ^String coll ^Map ref]
|
||||||
|
(make-db-cursor db coll ref {}))
|
||||||
|
([^DB db ^String coll ^Map ref fields]
|
||||||
|
(.find
|
||||||
|
(.getCollection db (name coll))
|
||||||
|
(to-db-object ref)
|
||||||
|
(as-field-selector fields))))
|
||||||
|
|
||||||
|
(def cursor-options {:awaitdata Bytes/QUERYOPTION_AWAITDATA
|
||||||
|
;;:exhaust Bytes/QUERYOPTION_EXHAUST - not human settable
|
||||||
|
:notimeout Bytes/QUERYOPTION_NOTIMEOUT
|
||||||
|
:oplogreplay Bytes/QUERYOPTION_OPLOGREPLAY
|
||||||
|
:partial Bytes/QUERYOPTION_PARTIAL
|
||||||
|
:slaveok Bytes/QUERYOPTION_SLAVEOK
|
||||||
|
:tailable Bytes/QUERYOPTION_TAILABLE})
|
||||||
|
|
||||||
|
(defn get-options
|
||||||
|
"Returns map of cursor's options with current state."
|
||||||
|
[^DBCursor db-cur]
|
||||||
|
(into {}
|
||||||
|
(for [[opt option-mask] cursor-options]
|
||||||
|
[opt (< 0 (bit-and (.getOptions db-cur) option-mask))])))
|
||||||
|
|
||||||
|
(defn add-option!
|
||||||
|
[^DBCursor db-cur ^String opt]
|
||||||
|
(.addOption db-cur (get cursor-options (keyword opt) 0)))
|
||||||
|
|
||||||
|
(defn remove-option!
|
||||||
|
[^DBCursor db-cur ^String opt]
|
||||||
|
(.setOptions db-cur (bit-and-not (.getOptions db-cur)
|
||||||
|
(get cursor-options (keyword opt) 0))))
|
||||||
|
|
||||||
|
(defmulti add-options (fn [db-cur opts] (class opts)))
|
||||||
|
(defmethod add-options Map [^DBCursor db-cur options]
|
||||||
|
"Changes options by using map of settings, which key specifies name of settings
|
||||||
|
and boolean value specifies new state of the setting.
|
||||||
|
usage:
|
||||||
|
(add-options db-cur {:notimeout true, :tailable false})
|
||||||
|
returns:
|
||||||
|
^DBCursor object."
|
||||||
|
(doseq [[opt value] (seq options)]
|
||||||
|
(if (= true value)
|
||||||
|
(add-option! db-cur opt)
|
||||||
|
(remove-option! db-cur opt)))
|
||||||
|
db-cur)
|
||||||
|
|
||||||
|
(defmethod add-options List [^DBCursor db-cur options]
|
||||||
|
"Takes list of options and activates these options
|
||||||
|
usage:
|
||||||
|
(add-options db-cur [:notimeout :tailable])
|
||||||
|
returns:
|
||||||
|
^DBCursor object"
|
||||||
|
(doseq [opt (seq options)]
|
||||||
|
(add-option! db-cur opt))
|
||||||
|
db-cur)
|
||||||
|
|
||||||
|
(defmethod add-options Integer [^DBCursor db-cur, option]
|
||||||
|
"Takes com.mongodb.Byte value and adds it to current settings.
|
||||||
|
usage:
|
||||||
|
(add-options db-cur com.mongodb.Bytes/QUERYOPTION_NOTIMEOUT)
|
||||||
|
returns:
|
||||||
|
^DBCursor object"
|
||||||
|
(.addOption db-cur option)
|
||||||
|
db-cur)
|
||||||
|
|
||||||
|
(defmethod add-options Keyword [^DBCursor db-cur, option]
|
||||||
|
"Takes just one keyword as name of settings and applies it to the db-cursor.
|
||||||
|
usage:
|
||||||
|
(add-options db-cur :notimeout)
|
||||||
|
returns:
|
||||||
|
^DBCursor object"
|
||||||
|
(add-option! db-cur option)
|
||||||
|
db-cur)
|
||||||
|
|
||||||
|
(defmethod add-options :default [^DBCursor db-cur, options]
|
||||||
|
"Using add-options with not supported type of options just passes unchanged cursor"
|
||||||
|
db-cur)
|
||||||
|
|
||||||
|
(defn ^DBCursor reset-options
|
||||||
|
"Resets cursor options to default value and returns cursor"
|
||||||
|
[^DBCursor db-cur]
|
||||||
|
(.resetOptions db-cur)
|
||||||
|
db-cur)
|
||||||
|
|
||||||
|
(defmulti format-as (fn [db-cur as] as))
|
||||||
|
|
||||||
|
(defmethod format-as :map [db-cur as]
|
||||||
|
(map #(from-db-object %1 true) db-cur))
|
||||||
|
|
||||||
|
(defmethod format-as :seq [db-cur as]
|
||||||
|
(seq db-cur))
|
||||||
|
|
||||||
|
(defmethod format-as :default [db-cur as]
|
||||||
|
db-cur)
|
||||||
|
|
||||||
|
|
@ -1,39 +1,62 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
|
;;
|
||||||
|
;; The APL v2.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; Copyright (c) 2012 Toby Hede
|
;; Copyright (c) 2012 Toby Hede
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;; you may not use this file except in compliance with the License.
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; You may obtain a copy of the License at
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;;
|
||||||
;; the terms of this license.
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; Copyright (c) 2012 Toby Hede
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns monger.db
|
(ns monger.db
|
||||||
|
"Functions that provide operations on databases"
|
||||||
(:refer-clojure :exclude [find remove count drop distinct empty?])
|
(:refer-clojure :exclude [find remove count drop distinct empty?])
|
||||||
(:import [com.mongodb Mongo DB DBCollection])
|
(:import [com.mongodb Mongo DB DBCollection])
|
||||||
(:require monger.core)
|
(:require monger.core
|
||||||
(:use monger.conversion))
|
[monger.conversion :refer :all]))
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; API
|
||||||
|
;;
|
||||||
|
|
||||||
(defn add-user
|
(defn add-user
|
||||||
"Adds a new user for this db"
|
"Adds a new user for this db"
|
||||||
([^String username, ^chars password]
|
[^DB db ^String username ^chars password]
|
||||||
(.addUser ^DB monger.core/*mongodb-database* username password))
|
(.addUser db username password))
|
||||||
([^DB database ^String username ^chars password]
|
|
||||||
(.addUser ^DB database username password)))
|
|
||||||
|
|
||||||
|
|
||||||
(defn drop-db
|
(defn drop-db
|
||||||
"Drops the currently set database (via core/set-db) or the specified database."
|
"Drops the currently set database (via core/set-db) or the specified database."
|
||||||
([]
|
[^DB db]
|
||||||
(.dropDatabase ^DB monger.core/*mongodb-database*))
|
(.dropDatabase db))
|
||||||
([^DB database]
|
|
||||||
(.dropDatabase ^DB database)))
|
|
||||||
|
|
||||||
(defn get-collection-names
|
(defn get-collection-names
|
||||||
"Returns a set containing the names of all collections in this database."
|
"Returns a set containing the names of all collections in this database."
|
||||||
([]
|
([^DB db]
|
||||||
(set (.getCollectionNames ^DB monger.core/*mongodb-database*)))
|
(set (.getCollectionNames db))))
|
||||||
([^DB database]
|
|
||||||
(set (.getCollectionNames ^DB database))))
|
|
||||||
|
|
|
||||||
|
|
@ -1,26 +1,50 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; The APL v2.0:
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;;
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; ----------------------------------------------------------------------------------
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; the terms of this license.
|
;;
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns
|
(ns monger.gridfs
|
||||||
^{:doc "Provides functions and macros for working with GridFS: storing files in GridFS, streaming files from GridFS,
|
"Provides functions and macros for working with GridFS: storing files in GridFS, streaming files from GridFS,
|
||||||
finding stored files.
|
finding stored files.
|
||||||
|
|
||||||
Related documentation guide: http://clojuremongodb.info/articles/gridfs.html"}
|
Related documentation guide: http://clojuremongodb.info/articles/gridfs.html"
|
||||||
monger.gridfs
|
|
||||||
(:refer-clojure :exclude [remove find])
|
(:refer-clojure :exclude [remove find])
|
||||||
(:require monger.core
|
(:require monger.core
|
||||||
[clojure.java.io :as io])
|
[clojure.java.io :as io]
|
||||||
(:use monger.conversion
|
[monger.conversion :refer :all]
|
||||||
[clojurewerkz.support.fn :only [fpartial]])
|
[clojurewerkz.support.fn :refer [fpartial]])
|
||||||
(:import [com.mongodb DB DBObject]
|
(:import [com.mongodb DB DBObject]
|
||||||
|
org.bson.types.ObjectId
|
||||||
[com.mongodb.gridfs GridFS GridFSInputFile]
|
[com.mongodb.gridfs GridFS GridFSInputFile]
|
||||||
[java.io InputStream File]))
|
[java.io InputStream ByteArrayInputStream File]))
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; Implementation
|
;; Implementation
|
||||||
|
|
@ -42,24 +66,16 @@
|
||||||
|
|
||||||
|
|
||||||
(defn remove
|
(defn remove
|
||||||
([]
|
[^GridFS fs query]
|
||||||
(remove {}))
|
(.remove fs ^DBObject (to-db-object query)))
|
||||||
([query]
|
|
||||||
(.remove ^GridFS monger.core/*mongodb-gridfs* ^DBObject (to-db-object query)))
|
|
||||||
([^GridFS fs query]
|
|
||||||
(.remove fs ^DBObject (to-db-object query))))
|
|
||||||
|
|
||||||
(defn remove-all
|
(defn remove-all
|
||||||
([]
|
[^GridFS fs]
|
||||||
(remove {}))
|
(remove fs {}))
|
||||||
([^GridFS fs]
|
|
||||||
(remove fs {})))
|
|
||||||
|
|
||||||
(defn all-files
|
(defn all-files
|
||||||
([]
|
([^GridFS fs]
|
||||||
(.getFileList ^GridFS monger.core/*mongodb-gridfs*))
|
(.getFileList fs (to-db-object {})))
|
||||||
([query]
|
|
||||||
(.getFileList ^GridFS monger.core/*mongodb-gridfs* query))
|
|
||||||
([^GridFS fs query]
|
([^GridFS fs query]
|
||||||
(.getFileList fs query)))
|
(.getFileList fs query)))
|
||||||
|
|
||||||
|
|
@ -67,10 +83,8 @@
|
||||||
(fpartial from-db-object true))
|
(fpartial from-db-object true))
|
||||||
|
|
||||||
(defn files-as-maps
|
(defn files-as-maps
|
||||||
([]
|
([^GridFS fs]
|
||||||
(map converter (all-files)))
|
(files-as-maps fs {}))
|
||||||
([query]
|
|
||||||
(map converter (all-files (to-db-object query))))
|
|
||||||
([^GridFS fs query]
|
([^GridFS fs query]
|
||||||
(map converter (all-files fs (to-db-object query)))))
|
(map converter (all-files fs (to-db-object query)))))
|
||||||
|
|
||||||
|
|
@ -79,27 +93,51 @@
|
||||||
;; Plumbing (low-level API)
|
;; Plumbing (low-level API)
|
||||||
;;
|
;;
|
||||||
|
|
||||||
|
(defprotocol InputStreamFactory
|
||||||
|
(^InputStream to-input-stream [input] "Makes InputStream out of the given input"))
|
||||||
|
|
||||||
|
(extend byte-array-type
|
||||||
|
InputStreamFactory
|
||||||
|
{:to-input-stream (fn [^bytes input]
|
||||||
|
(ByteArrayInputStream. input))})
|
||||||
|
|
||||||
|
(extend-protocol InputStreamFactory
|
||||||
|
String
|
||||||
|
(to-input-stream [^String input]
|
||||||
|
(io/make-input-stream input {:encoding "UTF-8"}))
|
||||||
|
|
||||||
|
File
|
||||||
|
(to-input-stream [^File input]
|
||||||
|
(io/make-input-stream input {:encoding "UTF-8"}))
|
||||||
|
|
||||||
|
InputStream
|
||||||
|
(to-input-stream [^InputStream input]
|
||||||
|
input))
|
||||||
|
|
||||||
(defprotocol GridFSInputFileFactory
|
(defprotocol GridFSInputFileFactory
|
||||||
(^com.mongodb.gridfs.GridFSInputFile make-input-file [input] "Makes GridFSInputFile out of the given input"))
|
(^GridFSInputFile create-gridfs-file [input ^GridFS fs] "Creates a file entry"))
|
||||||
|
|
||||||
(extend byte-array-type
|
(extend byte-array-type
|
||||||
GridFSInputFileFactory
|
GridFSInputFileFactory
|
||||||
{:make-input-file (fn [^bytes input]
|
{:create-gridfs-file (fn [^bytes input ^GridFS fs]
|
||||||
(.createFile ^GridFS monger.core/*mongodb-gridfs* input))})
|
(.createFile fs input))})
|
||||||
|
|
||||||
(extend-protocol GridFSInputFileFactory
|
(extend-protocol GridFSInputFileFactory
|
||||||
String
|
String
|
||||||
(make-input-file [^String input]
|
(create-gridfs-file [^String input ^GridFS fs]
|
||||||
(.createFile ^GridFS monger.core/*mongodb-gridfs* ^InputStream (io/make-input-stream input {:encoding "UTF-8"})))
|
(.createFile fs (io/file input)))
|
||||||
|
|
||||||
File
|
File
|
||||||
(make-input-file [^File input]
|
(create-gridfs-file [^File input ^GridFS fs]
|
||||||
(.createFile ^GridFS monger.core/*mongodb-gridfs* ^InputStream (io/make-input-stream input {:encoding "UTF-8"})))
|
(.createFile fs input))
|
||||||
|
|
||||||
InputStream
|
InputStream
|
||||||
(make-input-file [^InputStream input]
|
(create-gridfs-file [^InputStream input ^GridFS fs]
|
||||||
(.createFile ^GridFS monger.core/*mongodb-gridfs* ^InputStream input)))
|
(.createFile fs input)))
|
||||||
|
|
||||||
|
(defn ^GridFSInputFile make-input-file
|
||||||
|
[^GridFS fs input]
|
||||||
|
(create-gridfs-file input fs))
|
||||||
|
|
||||||
(defmacro store
|
(defmacro store
|
||||||
[^GridFSInputFile input & body]
|
[^GridFSInputFile input & body]
|
||||||
|
|
@ -107,9 +145,8 @@
|
||||||
(.save f# GridFS/DEFAULT_CHUNKSIZE)
|
(.save f# GridFS/DEFAULT_CHUNKSIZE)
|
||||||
(from-db-object f# true)))
|
(from-db-object f# true)))
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; "New" DSL, a higher-level API
|
;; Higher-level API
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defn save
|
(defn save
|
||||||
|
|
@ -142,48 +179,34 @@
|
||||||
;; Finders
|
;; Finders
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defprotocol Finders
|
(defn find
|
||||||
(find [input] "Finds multiple files using given input (an ObjectId, filename or query)")
|
[^GridFS fs query]
|
||||||
(find-one [input] "Finds one file using given input (an ObjectId, filename or query)")
|
(.find fs (to-db-object query)))
|
||||||
(find-maps [input] "Finds multiple files using given input (an ObjectId, filename or query), returning a Clojure map")
|
|
||||||
(find-one-as-map [input] "Finds one file using given input (an ObjectId, filename or query), returning a Clojure map"))
|
|
||||||
|
|
||||||
(extend-protocol Finders
|
(defn find-by-filename
|
||||||
String
|
[^GridFS fs ^String filename]
|
||||||
(find [^String input]
|
(.find fs (to-db-object {"filename" filename})))
|
||||||
(.find ^GridFS monger.core/*mongodb-gridfs* input))
|
|
||||||
(find-one [^String input]
|
|
||||||
(.findOne ^GridFS monger.core/*mongodb-gridfs* input))
|
|
||||||
(find-maps [^String input]
|
|
||||||
(map converter (find input)))
|
|
||||||
(find-one-as-map [^String input]
|
|
||||||
(converter (find-one input)))
|
|
||||||
|
|
||||||
org.bson.types.ObjectId
|
(defn find-by-md5
|
||||||
(find-one [^org.bson.types.ObjectId input]
|
[^GridFS fs ^String md5]
|
||||||
(.findOne ^GridFS monger.core/*mongodb-gridfs* input))
|
(.find fs (to-db-object {"md5" md5})))
|
||||||
(find-one-as-map [^org.bson.types.ObjectId input]
|
|
||||||
(converter (find-one input)))
|
|
||||||
|
|
||||||
|
(defn find-one
|
||||||
|
[^GridFS fs query]
|
||||||
|
(.findOne fs (to-db-object query)))
|
||||||
|
|
||||||
DBObject
|
(defn find-maps
|
||||||
(find [^DBObject input]
|
[^GridFS fs query]
|
||||||
(.find ^GridFS monger.core/*mongodb-gridfs* input))
|
(map converter (find fs query)))
|
||||||
(find-one [^DBObject input]
|
|
||||||
(.findOne ^GridFS monger.core/*mongodb-gridfs* input))
|
|
||||||
(find-maps [^DBObject input]
|
|
||||||
(map converter (find input)))
|
|
||||||
(find-one-as-map [^DBObject input]
|
|
||||||
(converter (find-one input)))
|
|
||||||
|
|
||||||
;; using java.util.Map here results in (occasional) recursion
|
(defn find-one-as-map
|
||||||
clojure.lang.IPersistentMap
|
[^GridFS fs query]
|
||||||
(find [^java.util.Map input]
|
(converter (find-one fs query)))
|
||||||
(find (to-db-object input)))
|
|
||||||
(find-one [^java.util.Map input]
|
|
||||||
(find-one (to-db-object input)))
|
|
||||||
(find-maps [^java.util.Map input]
|
|
||||||
(find-maps (to-db-object input)))
|
|
||||||
(find-one-as-map [^java.util.Map input]
|
|
||||||
(find-one-as-map (to-db-object input))))
|
|
||||||
|
|
||||||
|
(defn find-by-id
|
||||||
|
[^GridFS fs ^ObjectId id]
|
||||||
|
(.findOne fs id))
|
||||||
|
|
||||||
|
(defn find-map-by-id
|
||||||
|
[^GridFS fs ^ObjectId id]
|
||||||
|
(converter (find-by-id fs id)))
|
||||||
|
|
|
||||||
|
|
@ -1,73 +0,0 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
|
||||||
;;
|
|
||||||
;; The use and distribution terms for this software are covered by the
|
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
|
||||||
;; the terms of this license.
|
|
||||||
;; You must not remove this notice, or any other, from this software.
|
|
||||||
|
|
||||||
(ns monger.internal.fn)
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; Implementation
|
|
||||||
;;
|
|
||||||
|
|
||||||
(defn- apply-to-values [m f]
|
|
||||||
"Applies function f to all values in map m"
|
|
||||||
(into {} (for [[k v] m]
|
|
||||||
[k (f v)])))
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; API
|
|
||||||
;;
|
|
||||||
|
|
||||||
(defn fpartial
|
|
||||||
"Like clojure.core/partial but prepopulates last N arguments (first is passed in later)"
|
|
||||||
[f & args]
|
|
||||||
(fn [arg & more] (apply f arg (concat args more))))
|
|
||||||
|
|
||||||
(defprotocol IFNExpansion
|
|
||||||
(expand-all [x] "Replaces functions with their invocation results, recursively expands maps, evaluates all other values to themselves")
|
|
||||||
(expand-all-with [x f] "Replaces functions with their invocation results that function f is applied to, recursively expands maps, evaluates all other values to themselves"))
|
|
||||||
|
|
||||||
(extend-protocol IFNExpansion
|
|
||||||
java.lang.Integer
|
|
||||||
(expand-all [i] i)
|
|
||||||
(expand-all-with [i f] i)
|
|
||||||
|
|
||||||
java.lang.Long
|
|
||||||
(expand-all [l] l)
|
|
||||||
(expand-all-with [l f] l)
|
|
||||||
|
|
||||||
java.lang.String
|
|
||||||
(expand-all [s] s)
|
|
||||||
(expand-all-with [s f] s)
|
|
||||||
|
|
||||||
java.lang.Float
|
|
||||||
(expand-all [fl] fl)
|
|
||||||
(expand-all-with [fl f] fl)
|
|
||||||
|
|
||||||
java.lang.Double
|
|
||||||
(expand-all [d] d)
|
|
||||||
(expand-all-with [d f] d)
|
|
||||||
|
|
||||||
;; maps are also functions, so be careful here. MK.
|
|
||||||
clojure.lang.IPersistentMap
|
|
||||||
(expand-all [m] (apply-to-values m expand-all))
|
|
||||||
(expand-all-with [m f] (apply-to-values m (fpartial expand-all-with f)))
|
|
||||||
|
|
||||||
clojure.lang.PersistentVector
|
|
||||||
(expand-all [v] (map expand-all v))
|
|
||||||
(expand-all-with [v f] (map (fpartial expand-all-with f) v))
|
|
||||||
|
|
||||||
;; this distinguishes functions from maps, sets and so on, which are also
|
|
||||||
;; clojure.lang.AFn subclasses. MK.
|
|
||||||
clojure.lang.AFunction
|
|
||||||
(expand-all [f] (f))
|
|
||||||
(expand-all-with [f expander] (expander f))
|
|
||||||
|
|
||||||
Object
|
|
||||||
(expand-all [x] x)
|
|
||||||
(expand-all-with [x f] x))
|
|
||||||
|
|
@ -1,11 +1,35 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; The APL v2.0:
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;;
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; ----------------------------------------------------------------------------------
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; the terms of this license.
|
;;
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns monger.internal.pagination)
|
(ns monger.internal.pagination)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,18 +1,48 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; The APL v2.0:
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;;
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; ----------------------------------------------------------------------------------
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; the terms of this license.
|
;;
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns monger.joda-time
|
(ns monger.joda-time
|
||||||
|
"An optional convenience namespaces for applications that heavily use dates and would prefer use JodaTime types
|
||||||
|
transparently when storing and loading them from MongoDB and serializing to JSON and/or with Clojure reader.
|
||||||
|
|
||||||
|
Enables automatic conversion of JodaTime date/time/instant instances to JDK dates (java.util.Date) when documents
|
||||||
|
are serialized and the other way around when documents are loaded. Extends clojure.data.json/Write-JSON protocol for
|
||||||
|
JodaTime types.
|
||||||
|
|
||||||
|
To use it, make sure you add dependencies on clj-time (or JodaTime) and clojure.data.json."
|
||||||
(:import [org.joda.time DateTime DateTimeZone ReadableInstant]
|
(:import [org.joda.time DateTime DateTimeZone ReadableInstant]
|
||||||
[org.joda.time.format ISODateTimeFormat])
|
[org.joda.time.format ISODateTimeFormat])
|
||||||
(:use [monger.conversion])
|
(:require [monger.conversion :refer :all]))
|
||||||
(:require [clojure.data.json :as json]
|
|
||||||
clojurewerkz.support.json))
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; API
|
;; API
|
||||||
|
|
@ -21,9 +51,33 @@
|
||||||
(extend-protocol ConvertToDBObject
|
(extend-protocol ConvertToDBObject
|
||||||
org.joda.time.base.AbstractInstant
|
org.joda.time.base.AbstractInstant
|
||||||
(to-db-object [^AbstractInstant input]
|
(to-db-object [^AbstractInstant input]
|
||||||
|
(to-db-object (.toDate input)))
|
||||||
|
org.joda.time.base.AbstractPartial
|
||||||
|
(to-db-object [^AbstractPartial input]
|
||||||
(to-db-object (.toDate input))))
|
(to-db-object (.toDate input))))
|
||||||
|
|
||||||
(extend-protocol ConvertFromDBObject
|
(extend-protocol ConvertFromDBObject
|
||||||
java.util.Date
|
java.util.Date
|
||||||
(from-db-object [^java.util.Date input keywordize]
|
(from-db-object [^java.util.Date input keywordize]
|
||||||
(org.joda.time.DateTime. input)))
|
(org.joda.time.DateTime. input)))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; Reader extensions
|
||||||
|
;;
|
||||||
|
|
||||||
|
(defmethod print-dup org.joda.time.base.AbstractInstant
|
||||||
|
[^org.joda.time.base.AbstractInstant d out]
|
||||||
|
(print-dup (.toDate d) out))
|
||||||
|
|
||||||
|
|
||||||
|
(defmethod print-dup org.joda.time.base.AbstractPartial
|
||||||
|
[^org.joda.time.base.AbstractPartial d out]
|
||||||
|
(print-dup (.toDate d) out))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; JSON serialization
|
||||||
|
;;
|
||||||
|
|
||||||
|
(require 'clojurewerkz.support.json)
|
||||||
|
|
|
||||||
|
|
@ -1,13 +1,38 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; The APL v2.0:
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;;
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; ----------------------------------------------------------------------------------
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; the terms of this license.
|
;;
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns ^{:doc "Kept for backwards compatibility. Please use clojurewerkz.support.js from now on."} monger.js
|
(ns monger.js
|
||||||
|
"Kept for backwards compatibility. Please use clojurewerkz.support.js from now on."
|
||||||
(:require [clojurewerkz.support.js :as js]))
|
(:require [clojurewerkz.support.js :as js]))
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,24 +1,116 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; The APL v2.0:
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;;
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; ----------------------------------------------------------------------------------
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; the terms of this license.
|
;;
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
(ns monger.json
|
||||||
|
"Provides clojure.data.json/Write-JSON protocol extension for MongoDB-specific types, such as
|
||||||
|
org.bson.types.ObjectId"
|
||||||
|
(:import org.bson.types.ObjectId
|
||||||
|
org.bson.types.BSONTimestamp))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; Implementation
|
||||||
|
;;
|
||||||
|
|
||||||
|
;; copied from clojure.reducers
|
||||||
|
(defmacro ^:private compile-if
|
||||||
|
"Evaluate `exp` and if it returns logical true and doesn't error, expand to
|
||||||
|
`then`. Else expand to `else`.
|
||||||
|
|
||||||
|
(compile-if (Class/forName \"java.util.concurrent.ForkJoinTask\")
|
||||||
|
(do-cool-stuff-with-fork-join)
|
||||||
|
(fall-back-to-executor-services))"
|
||||||
|
[exp then else]
|
||||||
|
(if (try (eval exp)
|
||||||
|
(catch Throwable _ false))
|
||||||
|
`(do ~then)
|
||||||
|
`(do ~else)))
|
||||||
|
|
||||||
|
|
||||||
(ns ^{:doc "Provides clojure.data.json/Write-JSON protocol extension for MongoDB-specific types, such as
|
|
||||||
org.bson.types.ObjectId"}
|
|
||||||
monger.json
|
|
||||||
(:import org.bson.types.ObjectId)
|
|
||||||
(:require [clojure.data.json :as json]
|
|
||||||
clojurewerkz.support.json))
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; API
|
;; API
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(extend-protocol json/Write-JSON
|
(require 'clojurewerkz.support.json)
|
||||||
ObjectId
|
|
||||||
(write-json [^ObjectId object out escape-unicode?]
|
;; all this madness would not be necessary if some people cared about backwards
|
||||||
(json/write-json (.toString object) out escape-unicode?)))
|
;; compatiblity of the libraries they maintain. Shame on the clojure.data.json maintainer. MK.
|
||||||
|
(compile-if (and (find-ns 'clojure.data.json)
|
||||||
|
clojure.data.json/JSONWriter)
|
||||||
|
(try
|
||||||
|
(extend-protocol clojure.data.json/JSONWriter
|
||||||
|
ObjectId
|
||||||
|
(-write
|
||||||
|
([^ObjectId object out]
|
||||||
|
(clojure.data.json/write (.toString object) out))
|
||||||
|
([^ObjectId object out options]
|
||||||
|
(clojure.data.json/write (.toString object) out options))))
|
||||||
|
|
||||||
|
(extend-protocol clojure.data.json/JSONWriter
|
||||||
|
BSONTimestamp
|
||||||
|
(-write
|
||||||
|
([^BSONTimestamp object out]
|
||||||
|
(clojure.data.json/write {:time (.getTime object) :inc (.getInc object)} out))
|
||||||
|
([^BSONTimestamp object out options]
|
||||||
|
(clojure.data.json/write {:time (.getTime object) :inc (.getInc object)} out options))))
|
||||||
|
|
||||||
|
(catch Throwable _
|
||||||
|
false))
|
||||||
|
(comment "Nothing to do, clojure.data.json is not available"))
|
||||||
|
|
||||||
|
(compile-if (and (find-ns 'clojure.data.json)
|
||||||
|
clojure.data.json/Write-JSON)
|
||||||
|
(try
|
||||||
|
(extend-protocol clojure.data.json/Write-JSON
|
||||||
|
ObjectId
|
||||||
|
(write-json [^ObjectId object out escape-unicode?]
|
||||||
|
(clojure.data.json/write-json (.toString object) out escape-unicode?)))
|
||||||
|
(catch Throwable _
|
||||||
|
false))
|
||||||
|
(comment "Nothing to do, clojure.data.json 0.1.x is not available"))
|
||||||
|
|
||||||
|
|
||||||
|
(try
|
||||||
|
(require 'cheshire.generate)
|
||||||
|
(catch Throwable t
|
||||||
|
false))
|
||||||
|
|
||||||
|
(try
|
||||||
|
(cheshire.generate/add-encoder ObjectId
|
||||||
|
(fn [^ObjectId oid ^com.fasterxml.jackson.core.json.WriterBasedJsonGenerator generator]
|
||||||
|
(.writeString generator (.toString oid))))
|
||||||
|
(cheshire.generate/add-encoder BSONTimestamp
|
||||||
|
(fn [^BSONTimestamp ts ^com.fasterxml.jackson.core.json.WriterBasedJsonGenerator generator]
|
||||||
|
(cheshire.generate/encode-map {:time (.getTime ts) :inc (.getInc ts)} generator)))
|
||||||
|
(catch Throwable t
|
||||||
|
false))
|
||||||
|
|
|
||||||
|
|
@ -1,18 +1,42 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; The APL v2.0:
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;;
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; ----------------------------------------------------------------------------------
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; the terms of this license.
|
;;
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns ^{:doc "Provides vars that represent various MongoDB operators, for example, $gt or $in or $regex.
|
(ns monger.operators
|
||||||
They can be passed in queries as strings but using vars from this namespace makes the code
|
"Provides vars that represent various MongoDB operators, for example, $gt or $in or $regex.
|
||||||
a bit cleaner and closer to what you would see in a MongoDB shell query.
|
They can be passed in queries as strings but using vars from this namespace makes the code
|
||||||
|
a bit cleaner and closer to what you would see in a MongoDB shell query.
|
||||||
|
|
||||||
Related documentation guide: http://clojuremongodb.info/articles/querying.html"}
|
Related documentation guide: http://clojuremongodb.info/articles/querying.html")
|
||||||
monger.operators)
|
|
||||||
|
|
||||||
(defmacro ^{:private true} defoperator
|
(defmacro ^{:private true} defoperator
|
||||||
[operator]
|
[operator]
|
||||||
|
|
@ -22,6 +46,8 @@
|
||||||
;; QUERY OPERATORS
|
;; QUERY OPERATORS
|
||||||
;;
|
;;
|
||||||
|
|
||||||
|
(declare $gt $gte $lt $lte $all $in $nin $eq $ne $elemMatch $regex $options)
|
||||||
|
|
||||||
;; $gt is "greater than" comparator
|
;; $gt is "greater than" comparator
|
||||||
;; $gte is "greater than or equals" comparator
|
;; $gte is "greater than or equals" comparator
|
||||||
;; $gt is "less than" comparator
|
;; $gt is "less than" comparator
|
||||||
|
|
@ -56,10 +82,16 @@
|
||||||
;; (mgcol/find-maps "languages" { :tags { $nin [ "functional" ] } } )
|
;; (mgcol/find-maps "languages" { :tags { $nin [ "functional" ] } } )
|
||||||
(defoperator $nin)
|
(defoperator $nin)
|
||||||
|
|
||||||
|
;; $eq is "equals" comparator
|
||||||
|
;;
|
||||||
|
;; EXAMPLES:
|
||||||
|
;; (monger.collection/find "libraries" { :language { $eq "Clojure" }})
|
||||||
|
(defoperator $eq)
|
||||||
|
|
||||||
;; $ne is "non-equals" comparator
|
;; $ne is "non-equals" comparator
|
||||||
;;
|
;;
|
||||||
;; EXAMPLES:
|
;; EXAMPLES:
|
||||||
;; (monger.collection/find "libraries" {$ne { :language "Clojure" }})
|
;; (monger.collection/find "libraries" { :language { $ne "Clojure" }})
|
||||||
(defoperator $ne)
|
(defoperator $ne)
|
||||||
|
|
||||||
;; $elemMatch checks if an element in an array matches the specified expression
|
;; $elemMatch checks if an element in an array matches the specified expression
|
||||||
|
|
@ -72,6 +104,37 @@
|
||||||
(defoperator $regex)
|
(defoperator $regex)
|
||||||
(defoperator $options)
|
(defoperator $options)
|
||||||
|
|
||||||
|
;; comment on a query predicate
|
||||||
|
|
||||||
|
(declare $comment $explain $hint $maxTimeMS $orderBy $query $returnKey $showDiskLoc $natural)
|
||||||
|
|
||||||
|
(defoperator $comment)
|
||||||
|
(defoperator $explain)
|
||||||
|
(defoperator $hint)
|
||||||
|
(defoperator $maxTimeMS)
|
||||||
|
(defoperator $orderBy)
|
||||||
|
(defoperator $query)
|
||||||
|
(defoperator $returnKey)
|
||||||
|
(defoperator $showDiskLoc)
|
||||||
|
(defoperator $natural)
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; EVALUATION (QUERY)
|
||||||
|
;;
|
||||||
|
|
||||||
|
(declare $expr $jsonSchema $where $and $or $nor)
|
||||||
|
|
||||||
|
(defoperator $expr)
|
||||||
|
(defoperator $jsonSchema)
|
||||||
|
|
||||||
|
;; Matches documents that satisfy a JavaScript expression.
|
||||||
|
;;
|
||||||
|
;; EXAMPLES:
|
||||||
|
;;
|
||||||
|
;; (monger.collection/find "people" { $where "this.placeOfBirth === this.address.city" })
|
||||||
|
(defoperator $where)
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; LOGIC OPERATORS
|
;; LOGIC OPERATORS
|
||||||
;;
|
;;
|
||||||
|
|
@ -104,6 +167,8 @@
|
||||||
;; ATOMIC MODIFIERS
|
;; ATOMIC MODIFIERS
|
||||||
;;
|
;;
|
||||||
|
|
||||||
|
(declare $inc $mul $set $unset $setOnInsert $rename $push $position $each $addToSet $pop $pull $pullAll $bit $bitsAllClear $bitsAllSet $bitsAnyClear $bitsAnySet $exists $mod $size $type $not)
|
||||||
|
|
||||||
;; $inc increments one or many fields for the given value, otherwise sets the field to value
|
;; $inc increments one or many fields for the given value, otherwise sets the field to value
|
||||||
;;
|
;;
|
||||||
;; EXAMPLES:
|
;; EXAMPLES:
|
||||||
|
|
@ -111,6 +176,8 @@
|
||||||
;; (monger.collection/update "scores" { :_id user-id } { :score 20 :bonus 10 } })
|
;; (monger.collection/update "scores" { :_id user-id } { :score 20 :bonus 10 } })
|
||||||
(defoperator $inc)
|
(defoperator $inc)
|
||||||
|
|
||||||
|
(defoperator $mul)
|
||||||
|
|
||||||
;; $set sets an existing (or non-existing) field (or set of fields) to value
|
;; $set sets an existing (or non-existing) field (or set of fields) to value
|
||||||
;; $set supports all datatypes.
|
;; $set supports all datatypes.
|
||||||
;;
|
;;
|
||||||
|
|
@ -125,6 +192,13 @@
|
||||||
;; (monger.collection/update "things" { :_id oid } { $unset { :weight 1 } })
|
;; (monger.collection/update "things" { :_id oid } { $unset { :weight 1 } })
|
||||||
(defoperator $unset)
|
(defoperator $unset)
|
||||||
|
|
||||||
|
;; $setOnInsert assigns values to fields during an upsert only when using the upsert option to the update operation performs an insert.
|
||||||
|
;; New in version 2.4. http://docs.mongodb.org/manual/reference/operator/setOnInsert/
|
||||||
|
;;
|
||||||
|
;; EXAMPLES:
|
||||||
|
;; (monger.collection/find-and-modify "things" {:_id oid} {$set {:lastseen now} $setOnInsert {:firstseen now}} :upsert true)
|
||||||
|
(defoperator $setOnInsert)
|
||||||
|
|
||||||
;; $rename renames a given field
|
;; $rename renames a given field
|
||||||
;;
|
;;
|
||||||
;; EXAMPLES:
|
;; EXAMPLES:
|
||||||
|
|
@ -138,12 +212,17 @@
|
||||||
;; (mgcol/update "docs" { :_id oid } { $push { :tags "modifiers" } })
|
;; (mgcol/update "docs" { :_id oid } { $push { :tags "modifiers" } })
|
||||||
(defoperator $push)
|
(defoperator $push)
|
||||||
|
|
||||||
;; $pushAll appends each value in value_array to field, if field is an existing array, otherwise sets field to the array value_array
|
;; $position modifies the behavior of $push per https://docs.mongodb.com/manual/reference/operator/update/position/
|
||||||
;; if field is not present. If field is present but is not an array, an error condition is raised.
|
(defoperator $position)
|
||||||
|
|
||||||
|
;; $each is a modifier for the $push and $addToSet operators for appending multiple values to an array field.
|
||||||
|
;; Without the $each modifier $push and $addToSet will append an array as a single value.
|
||||||
|
;; MongoDB 2.4 adds support for the $each modifier to the $push operator.
|
||||||
|
;; In MongoDB 2.2 the $each modifier can only be used with the $addToSet operator.
|
||||||
;;
|
;;
|
||||||
;; EXAMPLES:
|
;; EXAMPLES:
|
||||||
;; (mgcol/update coll { :_id oid } { $pushAll { :tags ["mongodb" "docs"] } })
|
;; (mgcol/update coll { :_id oid } { $push { :tags { $each ["mongodb" "docs"] } } })
|
||||||
(defoperator $pushAll)
|
(defoperator $each)
|
||||||
|
|
||||||
;; $addToSet Adds value to the array only if its not in the array already, if field is an existing array, otherwise sets field to the
|
;; $addToSet Adds value to the array only if its not in the array already, if field is an existing array, otherwise sets field to the
|
||||||
;; array value if field is not present. If field is present but is not an array, an error condition is raised.
|
;; array value if field is not present. If field is present but is not an array, an error condition is raised.
|
||||||
|
|
@ -171,11 +250,15 @@
|
||||||
;; an error condition is raised.
|
;; an error condition is raised.
|
||||||
;;
|
;;
|
||||||
;; EXAMPLES:
|
;; EXAMPLES:
|
||||||
;; (mgcol/update coll { :_id oid } { $pull { :measurements 1.2 } })
|
;; (mgcol/update coll { :_id oid } { $pullAll { :measurements 1.2 } })
|
||||||
;; (mgcol/update coll { :_id oid } { $pull { :measurements { $gte 1.2 } } })
|
;; (mgcol/update coll { :_id oid } { $pullAll { :measurements { $gte 1.2 } } })
|
||||||
(defoperator $pullAll)
|
(defoperator $pullAll)
|
||||||
|
|
||||||
(defoperator $bit)
|
(defoperator $bit)
|
||||||
|
(defoperator $bitsAllClear)
|
||||||
|
(defoperator $bitsAllSet)
|
||||||
|
(defoperator $bitsAnyClear)
|
||||||
|
(defoperator $bitsAnySet)
|
||||||
|
|
||||||
(defoperator $exists)
|
(defoperator $exists)
|
||||||
(defoperator $mod)
|
(defoperator $mod)
|
||||||
|
|
@ -185,33 +268,132 @@
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; Aggregation in 2.2
|
;; Aggregation in 4.2
|
||||||
;;
|
;;
|
||||||
|
|
||||||
|
(declare $addFields $bucket $bucketAuto $collStats $facet $geoNear $graphLookup $indexStats $listSessions $lookup $match $merge $out $planCacheStats $project $redact $replaceRoot $replaceWith $sample $limit $skip $unwind $group $sort $sortByCount $currentOp $listLocalSessions $cmp $min $max $avg $stdDevPop $stdDevSamp $sum $let $first $last $abs $add $ceil $divide $exp $floor $ln $log $log10 $multiply $pow $round $sqrt $subtract $trunc $literal $arrayElemAt $arrayToObject $concatArrays $filter $indexOfArray $isArray $map $objectToArray $range $reduce $reverseArray $zip $mergeObjects $allElementsTrue $anyElementsTrue $setDifference $setEquals $setIntersection $setIsSubset $setUnion $strcasecmp $substr $substrBytes $substrCP $toLower $toString $toUpper $concat $indexOfBytes $indexOfCP $ltrim $regexFind $regexFindAll $regexMatch $rtrim $split $strLenBytes $subLenCP $trim $sin $cos $tan $asin $acos $atan $atan2 $asinh $acosh $atanh $radiansToDegrees $degreesToRadians $convert $toBool $toDecimal $toDouble $toInt $toLong $toObjectId $dayOfMonth $dayOfWeek $dayOfYear $hour $minute $month $second $millisecond $week $year $isoDate $dateFromParts $dateFromString $dateToParts $dateToString $isoDayOfWeek $isoWeek $isoWeekYear $toDate $ifNull $cond $switch)
|
||||||
|
|
||||||
|
(defoperator $addFields)
|
||||||
|
(defoperator $bucket)
|
||||||
|
(defoperator $bucketAuto)
|
||||||
|
(defoperator $collStats)
|
||||||
|
(defoperator $facet)
|
||||||
|
(defoperator $geoNear)
|
||||||
|
(defoperator $graphLookup)
|
||||||
|
(defoperator $indexStats)
|
||||||
|
(defoperator $listSessions)
|
||||||
|
(defoperator $lookup)
|
||||||
(defoperator $match)
|
(defoperator $match)
|
||||||
|
(defoperator $merge)
|
||||||
|
(defoperator $out)
|
||||||
|
(defoperator $planCacheStats)
|
||||||
(defoperator $project)
|
(defoperator $project)
|
||||||
|
(defoperator $redact)
|
||||||
|
(defoperator $replaceRoot)
|
||||||
|
(defoperator $replaceWith)
|
||||||
|
(defoperator $sample)
|
||||||
(defoperator $limit)
|
(defoperator $limit)
|
||||||
(defoperator $skip)
|
(defoperator $skip)
|
||||||
(defoperator $unwind)
|
(defoperator $unwind)
|
||||||
(defoperator $group)
|
(defoperator $group)
|
||||||
(defoperator $sort)
|
(defoperator $sort)
|
||||||
|
(defoperator $sortByCount)
|
||||||
|
|
||||||
|
(defoperator $currentOp)
|
||||||
|
(defoperator $listLocalSessions)
|
||||||
|
|
||||||
(defoperator $cmp)
|
(defoperator $cmp)
|
||||||
|
|
||||||
(defoperator $min)
|
(defoperator $min)
|
||||||
(defoperator $max)
|
(defoperator $max)
|
||||||
(defoperator $avg)
|
(defoperator $avg)
|
||||||
|
(defoperator $stdDevPop)
|
||||||
|
(defoperator $stdDevSamp)
|
||||||
(defoperator $sum)
|
(defoperator $sum)
|
||||||
|
(defoperator $let)
|
||||||
|
|
||||||
|
(defoperator $first)
|
||||||
|
(defoperator $last)
|
||||||
|
|
||||||
|
(defoperator $abs)
|
||||||
(defoperator $add)
|
(defoperator $add)
|
||||||
|
(defoperator $ceil)
|
||||||
(defoperator $divide)
|
(defoperator $divide)
|
||||||
|
(defoperator $exp)
|
||||||
|
(defoperator $floor)
|
||||||
|
(defoperator $ln)
|
||||||
|
(defoperator $log)
|
||||||
|
(defoperator $log10)
|
||||||
(defoperator $multiply)
|
(defoperator $multiply)
|
||||||
(defoperator $substract)
|
(defoperator $pow)
|
||||||
|
(defoperator $round)
|
||||||
|
(defoperator $sqrt)
|
||||||
|
(defoperator $subtract)
|
||||||
|
(defoperator $trunc)
|
||||||
|
(defoperator $literal)
|
||||||
|
|
||||||
|
(defoperator $arrayElemAt)
|
||||||
|
(defoperator $arrayToObject)
|
||||||
|
(defoperator $concatArrays)
|
||||||
|
(defoperator $filter)
|
||||||
|
(defoperator $indexOfArray)
|
||||||
|
(defoperator $isArray)
|
||||||
|
(defoperator $map)
|
||||||
|
(defoperator $objectToArray)
|
||||||
|
(defoperator $range)
|
||||||
|
(defoperator $reduce)
|
||||||
|
(defoperator $reverseArray)
|
||||||
|
(defoperator $zip)
|
||||||
|
(defoperator $mergeObjects)
|
||||||
|
|
||||||
|
(defoperator $allElementsTrue)
|
||||||
|
(defoperator $anyElementsTrue)
|
||||||
|
(defoperator $setDifference)
|
||||||
|
(defoperator $setEquals)
|
||||||
|
(defoperator $setIntersection)
|
||||||
|
(defoperator $setIsSubset)
|
||||||
|
(defoperator $setUnion)
|
||||||
|
|
||||||
(defoperator $strcasecmp)
|
(defoperator $strcasecmp)
|
||||||
(defoperator $substr)
|
(defoperator $substr)
|
||||||
|
(defoperator $substrBytes)
|
||||||
|
(defoperator $substrCP)
|
||||||
(defoperator $toLower)
|
(defoperator $toLower)
|
||||||
|
(defoperator $toString)
|
||||||
(defoperator $toUpper)
|
(defoperator $toUpper)
|
||||||
|
(defoperator $concat)
|
||||||
|
(defoperator $indexOfBytes)
|
||||||
|
(defoperator $indexOfCP)
|
||||||
|
(defoperator $ltrim)
|
||||||
|
(defoperator $regexFind)
|
||||||
|
(defoperator $regexFindAll)
|
||||||
|
(defoperator $regexMatch)
|
||||||
|
(defoperator $rtrim)
|
||||||
|
(defoperator $split)
|
||||||
|
(defoperator $strLenBytes)
|
||||||
|
(defoperator $subLenCP)
|
||||||
|
(defoperator $trim)
|
||||||
|
|
||||||
|
(defoperator $sin)
|
||||||
|
(defoperator $cos)
|
||||||
|
(defoperator $tan)
|
||||||
|
(defoperator $asin)
|
||||||
|
(defoperator $acos)
|
||||||
|
(defoperator $atan)
|
||||||
|
(defoperator $atan2)
|
||||||
|
(defoperator $asinh)
|
||||||
|
(defoperator $acosh)
|
||||||
|
(defoperator $atanh)
|
||||||
|
(defoperator $radiansToDegrees)
|
||||||
|
(defoperator $degreesToRadians)
|
||||||
|
|
||||||
|
(defoperator $convert)
|
||||||
|
(defoperator $toBool)
|
||||||
|
(defoperator $toDecimal)
|
||||||
|
(defoperator $toDouble)
|
||||||
|
(defoperator $toInt)
|
||||||
|
(defoperator $toLong)
|
||||||
|
(defoperator $toObjectId)
|
||||||
|
|
||||||
(defoperator $dayOfMonth)
|
(defoperator $dayOfMonth)
|
||||||
(defoperator $dayOfWeek)
|
(defoperator $dayOfWeek)
|
||||||
|
|
@ -220,10 +402,58 @@
|
||||||
(defoperator $minute)
|
(defoperator $minute)
|
||||||
(defoperator $month)
|
(defoperator $month)
|
||||||
(defoperator $second)
|
(defoperator $second)
|
||||||
|
(defoperator $millisecond)
|
||||||
(defoperator $week)
|
(defoperator $week)
|
||||||
(defoperator $year)
|
(defoperator $year)
|
||||||
(defoperator $isoDate)
|
(defoperator $isoDate)
|
||||||
|
(defoperator $dateFromParts)
|
||||||
|
(defoperator $dateFromString)
|
||||||
|
(defoperator $dateToParts)
|
||||||
|
(defoperator $dateToString)
|
||||||
|
(defoperator $isoDayOfWeek)
|
||||||
|
(defoperator $isoWeek)
|
||||||
|
(defoperator $isoWeekYear)
|
||||||
|
(defoperator $toDate)
|
||||||
|
|
||||||
|
|
||||||
(defoperator $ifNull)
|
(defoperator $ifNull)
|
||||||
(defoperator $cond)
|
(defoperator $cond)
|
||||||
|
(defoperator $switch)
|
||||||
|
|
||||||
|
;; Geospatial
|
||||||
|
(declare $geoWithin $geoIntersects $near $nearSphere $geometry $maxDistance $minDistance $center $centerSphere $box $polygon $slice)
|
||||||
|
(defoperator $geoWithin)
|
||||||
|
(defoperator $geoIntersects)
|
||||||
|
(defoperator $near)
|
||||||
|
(defoperator $nearSphere)
|
||||||
|
(defoperator $geometry)
|
||||||
|
(defoperator $maxDistance)
|
||||||
|
(defoperator $minDistance)
|
||||||
|
(defoperator $center)
|
||||||
|
(defoperator $centerSphere)
|
||||||
|
(defoperator $box)
|
||||||
|
(defoperator $polygon)
|
||||||
|
|
||||||
|
(defoperator $slice)
|
||||||
|
|
||||||
|
;; full text search
|
||||||
|
(declare $text $meta $search $language $natural $currentDate $isolated $count)
|
||||||
|
(defoperator $text)
|
||||||
|
(defoperator $meta)
|
||||||
|
(defoperator $search)
|
||||||
|
(defoperator $language)
|
||||||
|
(defoperator $natural)
|
||||||
|
|
||||||
|
;; $currentDate operator sets the value of a field to the current date, either as a Date or a timestamp. The default type is Date.
|
||||||
|
;;
|
||||||
|
;; EXAMPLES:
|
||||||
|
;; (mgcol/update coll { :_id oid } { $currentDate { :lastModified true } })
|
||||||
|
(defoperator $currentDate)
|
||||||
|
|
||||||
|
;; Isolates intermediate multi-document updates from other clients.
|
||||||
|
;;
|
||||||
|
;; EXAMPLES:
|
||||||
|
;; (mgcol/update "libraries" { :language "Clojure", $isolated 1 } { $inc { :popularity 1 } } {:multi true})
|
||||||
|
(defoperator $isolated)
|
||||||
|
|
||||||
|
(defoperator $count)
|
||||||
|
|
@ -1,31 +1,56 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; The APL v2.0:
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;;
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; ----------------------------------------------------------------------------------
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; the terms of this license.
|
;;
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns ^{:doc "Provides an expressive Query DSL that is very close to that in the Mongo shell (within reason).
|
(ns monger.query
|
||||||
This is the most flexible and recommended way to query with Monger. Queries can be composed, like in Korma.
|
"Provides an expressive Query DSL that is very close to that in the Mongo shell (within reason).
|
||||||
|
This is the most flexible and recommended way to query with Monger. Queries can be composed, like in Korma.
|
||||||
|
|
||||||
Related documentation guide: http://clojuremongodb.info/articles/querying.html"}
|
Related documentation guide: http://clojuremongodb.info/articles/querying.html"
|
||||||
monger.query
|
|
||||||
(:refer-clojure :exclude [select find sort])
|
(:refer-clojure :exclude [select find sort])
|
||||||
(:require [monger.core]
|
(:require [monger.core]
|
||||||
[monger.internal pagination])
|
[monger.internal pagination]
|
||||||
|
[monger.cursor :as cursor :refer [add-options]]
|
||||||
|
[monger.conversion :refer :all]
|
||||||
|
[monger.operators :refer :all])
|
||||||
(:import [com.mongodb DB DBCollection DBObject DBCursor ReadPreference]
|
(:import [com.mongodb DB DBCollection DBObject DBCursor ReadPreference]
|
||||||
[java.util List])
|
[java.util.concurrent TimeUnit]
|
||||||
(:use [monger conversion operators]))
|
java.util.List))
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; Implementation
|
;; Implementation
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(def ^{:dynamic true} *query-collection*)
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; Cursor/chain methods
|
;; Cursor/chain methods
|
||||||
;;
|
;;
|
||||||
|
|
@ -41,7 +66,6 @@
|
||||||
;; :skip - Skips the first N results.
|
;; :skip - Skips the first N results.
|
||||||
;; :limit - Returns a maximum of N results.
|
;; :limit - Returns a maximum of N results.
|
||||||
;; :batch-size - limits the nubmer of elements returned in one batch.
|
;; :batch-size - limits the nubmer of elements returned in one batch.
|
||||||
;; :hint - force Mongo to use a specific index for a query in order to improve performance.
|
|
||||||
;; :snapshot - sses snapshot mode for the query. Snapshot mode assures no duplicates are returned, or objects missed
|
;; :snapshot - sses snapshot mode for the query. Snapshot mode assures no duplicates are returned, or objects missed
|
||||||
;; which were present at both the start and end of the query's execution (if an object is new during the query, or
|
;; which were present at both the start and end of the query's execution (if an object is new during the query, or
|
||||||
;; deleted during the query, it may or may not be returned, even with snapshot mode). Note that short query responses
|
;; deleted during the query, it may or may not be returned, even with snapshot mode). Note that short query responses
|
||||||
|
|
@ -55,7 +79,6 @@
|
||||||
:skip 0
|
:skip 0
|
||||||
:limit 0
|
:limit 0
|
||||||
:batch-size 256
|
:batch-size 256
|
||||||
:hint nil
|
|
||||||
:snapshot false
|
:snapshot false
|
||||||
:keywordize-fields true
|
:keywordize-fields true
|
||||||
})
|
})
|
||||||
|
|
@ -63,17 +86,35 @@
|
||||||
(merge (empty-query) { :collection coll })))
|
(merge (empty-query) { :collection coll })))
|
||||||
|
|
||||||
(defn exec
|
(defn exec
|
||||||
[{ :keys [^DBCollection collection query fields skip limit sort batch-size hint snapshot read-preference keywordize-fields] :or { limit 0 batch-size 256 skip 0 } }]
|
[{:keys [^DBCollection collection
|
||||||
(let [cursor (doto (.find collection (to-db-object query) (as-field-selector fields))
|
query
|
||||||
(.limit limit)
|
fields
|
||||||
(.skip skip)
|
skip
|
||||||
(.sort (to-db-object sort))
|
limit
|
||||||
(.batchSize batch-size)
|
sort
|
||||||
(.hint (to-db-object hint)))]
|
batch-size
|
||||||
|
hint
|
||||||
|
snapshot
|
||||||
|
read-preference
|
||||||
|
keywordize-fields
|
||||||
|
max-time
|
||||||
|
options]
|
||||||
|
:or { limit 0 batch-size 256 skip 0 } }]
|
||||||
|
(with-open [cursor (doto (.find collection (to-db-object query) (as-field-selector fields))
|
||||||
|
(.limit limit)
|
||||||
|
(.skip skip)
|
||||||
|
(.sort (to-db-object sort))
|
||||||
|
(.batchSize batch-size))]
|
||||||
(when snapshot
|
(when snapshot
|
||||||
(.snapshot cursor))
|
(.snapshot cursor))
|
||||||
|
(when hint
|
||||||
|
(.hint cursor (to-db-object hint)))
|
||||||
(when read-preference
|
(when read-preference
|
||||||
(.setReadPreference cursor read-preference))
|
(.setReadPreference cursor read-preference))
|
||||||
|
(when max-time
|
||||||
|
(.maxTime cursor max-time TimeUnit/MILLISECONDS))
|
||||||
|
(when options
|
||||||
|
(add-options cursor options))
|
||||||
(map (fn [x] (from-db-object x keywordize-fields))
|
(map (fn [x] (from-db-object x keywordize-fields))
|
||||||
cursor)))
|
cursor)))
|
||||||
|
|
||||||
|
|
@ -117,6 +158,14 @@
|
||||||
[m ^ReadPreference rp]
|
[m ^ReadPreference rp]
|
||||||
(merge m { :read-preference rp }))
|
(merge m { :read-preference rp }))
|
||||||
|
|
||||||
|
(defn max-time
|
||||||
|
[m ^long max-time]
|
||||||
|
(merge m { :max-time max-time }))
|
||||||
|
|
||||||
|
(defn options
|
||||||
|
[m opts]
|
||||||
|
(merge m { :options opts }))
|
||||||
|
|
||||||
(defn keywordize-fields
|
(defn keywordize-fields
|
||||||
[m bool]
|
[m bool]
|
||||||
(merge m { :keywordize-fields bool }))
|
(merge m { :keywordize-fields bool }))
|
||||||
|
|
@ -126,12 +175,14 @@
|
||||||
(merge m { :limit per-page :skip (monger.internal.pagination/offset-for page per-page) }))
|
(merge m { :limit per-page :skip (monger.internal.pagination/offset-for page per-page) }))
|
||||||
|
|
||||||
(defmacro with-collection
|
(defmacro with-collection
|
||||||
[^String coll & body]
|
[db coll & body]
|
||||||
`(binding [*query-collection* (if (string? ~coll)
|
`(let [coll# ~coll
|
||||||
(.getCollection ^DB monger.core/*mongodb-database* ~coll)
|
^DB db# ~db
|
||||||
~coll)]
|
db-coll# (if (string? coll#)
|
||||||
(let [query# (-> (empty-query *query-collection*) ~@body)]
|
(.getCollection db# coll#)
|
||||||
(exec query#))))
|
coll#)
|
||||||
|
query# (-> (empty-query db-coll#) ~@body)]
|
||||||
|
(exec query#)))
|
||||||
|
|
||||||
(defmacro partial-query
|
(defmacro partial-query
|
||||||
[& body]
|
[& body]
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,43 @@
|
||||||
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
|
;;
|
||||||
|
;; The APL v2.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
|
;;
|
||||||
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns monger.ragtime
|
(ns monger.ragtime
|
||||||
|
"Ragtime integration"
|
||||||
(:refer-clojure :exclude [find sort])
|
(:refer-clojure :exclude [find sort])
|
||||||
(:require [ragtime.core :as ragtime]
|
(:require [ragtime.protocols :as proto]
|
||||||
[monger.core :as mg]
|
[monger.core :as mg]
|
||||||
[monger.collection :as mc])
|
[monger.collection :as mc]
|
||||||
(:use [monger.query :only [with-collection find sort]])
|
[monger.query :refer [with-collection find sort]])
|
||||||
(:import java.util.Date
|
(:import java.util.Date
|
||||||
[com.mongodb DB WriteConcern]))
|
[com.mongodb DB WriteConcern]))
|
||||||
|
|
||||||
|
|
@ -12,23 +46,20 @@
|
||||||
migrations-collection "meta.migrations")
|
migrations-collection "meta.migrations")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(extend-type com.mongodb.DB
|
(extend-type com.mongodb.DB
|
||||||
ragtime/Migratable
|
proto/DataStore
|
||||||
(add-migration-id [db id]
|
(add-migration-id [db id]
|
||||||
(mc/insert db migrations-collection {:_id id :created_at (Date.)} WriteConcern/FSYNC_SAFE))
|
(mc/insert db migrations-collection {:_id id :created_at (Date.)} WriteConcern/FSYNC_SAFE))
|
||||||
(remove-migration-id [db id]
|
(remove-migration-id [db id]
|
||||||
(mc/remove-by-id db migrations-collection id))
|
(mc/remove-by-id db migrations-collection id))
|
||||||
(applied-migration-ids [db]
|
(applied-migration-ids [db]
|
||||||
(mg/with-db db
|
(let [xs (with-collection db migrations-collection
|
||||||
(let [xs (with-collection migrations-collection
|
(find {})
|
||||||
(find {})
|
(sort {:created_at 1}))]
|
||||||
(sort {:created_at 1}))]
|
(vec (map :_id xs)))))
|
||||||
(set (map :_id xs))))))
|
|
||||||
|
|
||||||
|
|
||||||
(defn flush-migrations!
|
(defn flush-migrations!
|
||||||
"REMOVES all the information about previously performed migrations"
|
"REMOVES all the information about previously performed migrations"
|
||||||
[db]
|
[^DB db]
|
||||||
(mg/with-db db
|
(mc/remove db migrations-collection))
|
||||||
(mc/remove migrations-collection)))
|
|
||||||
|
|
|
||||||
|
|
@ -1,78 +1,72 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; The APL v2.0:
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;;
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; ----------------------------------------------------------------------------------
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; the terms of this license.
|
;;
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns ^{:doc "Provides functions that determine if a query (or other database operation)
|
(ns monger.result
|
||||||
was successful or not.
|
"Provides functions that determine if a query (or other database operation)
|
||||||
|
was successful or not.
|
||||||
|
|
||||||
Related documentation guides:
|
Related documentation guides:
|
||||||
|
|
||||||
* http://clojuremongodb.info/articles/inserting.html
|
* http://clojuremongodb.info/articles/inserting.html
|
||||||
* http://clojuremongodb.info/articles/updating.html
|
* http://clojuremongodb.info/articles/updating.html
|
||||||
* http://clojuremongodb.info/articles/commands.html
|
* http://clojuremongodb.info/articles/commands.html
|
||||||
* http://clojuremongodb.info/articles/mapreduce.html"}
|
* http://clojuremongodb.info/articles/mapreduce.html"
|
||||||
monger.result
|
(:import [com.mongodb WriteResult CommandResult])
|
||||||
(:import [com.mongodb DBObject WriteResult MapReduceOutput]
|
|
||||||
clojure.lang.IPersistentMap)
|
|
||||||
(:require monger.conversion))
|
(:require monger.conversion))
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; Implementation
|
|
||||||
;;
|
|
||||||
|
|
||||||
(defn- okayish?
|
|
||||||
[value]
|
|
||||||
(contains? #{true "true" 1 1.0} value))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; API
|
;; API
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defprotocol MongoCommandResult
|
(defprotocol WriteResultPredicates
|
||||||
(ok? [input] "Returns true if command result is a success")
|
(acknowledged? [input] "Returns true if write result is a success")
|
||||||
(has-error? [input] "Returns true if command result indicates an error")
|
(updated-existing? [input] "Returns true if write result has updated an existing document"))
|
||||||
(updated-existing? [input] "Returns true if command result has `updatedExisting` field set to true"))
|
|
||||||
|
|
||||||
(extend-protocol MongoCommandResult
|
|
||||||
DBObject
|
|
||||||
(ok?
|
|
||||||
[^DBObject result]
|
|
||||||
(okayish? (.get result "ok")))
|
|
||||||
(has-error?
|
|
||||||
[^DBObject result]
|
|
||||||
;; yes, this is exactly the logic MongoDB Java driver uses.
|
|
||||||
(> (count (str (.get result "err"))) 0))
|
|
||||||
(updated-existing?
|
|
||||||
[^DBObject result]
|
|
||||||
(let [v ^Boolean (.get result "updatedExisting")]
|
|
||||||
(and v (Boolean/valueOf v))))
|
|
||||||
|
|
||||||
|
|
||||||
|
(extend-protocol WriteResultPredicates
|
||||||
WriteResult
|
WriteResult
|
||||||
(ok?
|
(acknowledged?
|
||||||
[^WriteResult result]
|
[^WriteResult result]
|
||||||
(and (not (nil? result)) (ok? (.getLastError result))))
|
(.wasAcknowledged result))
|
||||||
(has-error?
|
|
||||||
[^WriteResult result]
|
|
||||||
(has-error? (.getLastError result)))
|
|
||||||
(updated-existing?
|
(updated-existing?
|
||||||
[^WriteResult result]
|
[^WriteResult result]
|
||||||
(updated-existing? (.getLastError result)))
|
(.isUpdateOfExisting result))
|
||||||
|
|
||||||
MapReduceOutput
|
CommandResult
|
||||||
(ok?
|
(acknowledged?
|
||||||
[^MapReduceOutput result]
|
[^CommandResult result]
|
||||||
(ok? ^DBObject (.getRaw result)))
|
(.ok result)))
|
||||||
|
|
||||||
IPersistentMap
|
(defn affected-count
|
||||||
(ok?
|
"Get the number of documents affected"
|
||||||
[^IPersistentMap m]
|
[^WriteResult result]
|
||||||
(okayish? (or (get m :ok)
|
(.getN result))
|
||||||
(get m "ok")))))
|
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,44 @@
|
||||||
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
|
;;
|
||||||
|
;; The APL v2.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
|
;;
|
||||||
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns monger.ring.session-store
|
(ns monger.ring.session-store
|
||||||
(:require [ring.middleware.session.store :as ringstore]
|
(:require [ring.middleware.session.store :as ringstore]
|
||||||
[monger.collection :as mc])
|
[monger.collection :as mc]
|
||||||
(:use monger.conversion)
|
[monger.core :as mg]
|
||||||
(:import [java.util UUID Date]))
|
[monger.conversion :refer :all])
|
||||||
|
(:import [java.util UUID Date]
|
||||||
|
[com.mongodb DB]
|
||||||
|
ring.middleware.session.store.SessionStore))
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; Implementation
|
;; Implementation
|
||||||
|
|
@ -18,29 +54,82 @@
|
||||||
;; API
|
;; API
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(defrecord MongoDBSessionStore [^String collection-name])
|
;; this session store stores Clojure data structures using Clojure reader. It will correctly store every
|
||||||
|
;; data structure Clojure reader can serialize and read but won't make the data useful to applications
|
||||||
|
;; in other languages.
|
||||||
|
|
||||||
|
(defrecord ClojureReaderBasedMongoDBSessionStore [^DB db ^String collection-name])
|
||||||
|
|
||||||
|
(defmethod print-dup java.util.Date
|
||||||
|
[^java.util.Date d ^java.io.Writer out]
|
||||||
|
(.write out
|
||||||
|
(str "#="
|
||||||
|
`(java.util.Date. ~(.getYear d)
|
||||||
|
~(.getMonth d)
|
||||||
|
~(.getDate d)
|
||||||
|
~(.getHours d)
|
||||||
|
~(.getMinutes d)
|
||||||
|
~(.getSeconds d)))))
|
||||||
|
|
||||||
|
(defmethod print-dup org.bson.types.ObjectId
|
||||||
|
[oid ^java.io.Writer out]
|
||||||
|
(.write out
|
||||||
|
(str "#="
|
||||||
|
`(org.bson.types.ObjectId. ~(str oid)))))
|
||||||
|
|
||||||
|
|
||||||
|
(extend-protocol ringstore/SessionStore
|
||||||
|
ClojureReaderBasedMongoDBSessionStore
|
||||||
|
|
||||||
|
(read-session [store key]
|
||||||
|
(if key
|
||||||
|
(if-let [m (mc/find-one-as-map (.db store) (.collection-name store) {:_id key})]
|
||||||
|
(read-string (:value m))
|
||||||
|
{})
|
||||||
|
{}))
|
||||||
|
|
||||||
|
(write-session [store key data]
|
||||||
|
(let [date (Date.)
|
||||||
|
key (or key (str (UUID/randomUUID)))
|
||||||
|
value (binding [*print-dup* true]
|
||||||
|
(pr-str (assoc data :_id key)))]
|
||||||
|
(mc/save (.db store) (.collection-name store) {:_id key :value value :date date})
|
||||||
|
key))
|
||||||
|
|
||||||
|
(delete-session [store key]
|
||||||
|
(mc/remove-by-id (.db store) (.collection-name store) key)
|
||||||
|
nil))
|
||||||
|
|
||||||
|
|
||||||
|
(defn session-store
|
||||||
|
[^DB db ^String s]
|
||||||
|
(ClojureReaderBasedMongoDBSessionStore. db s))
|
||||||
|
|
||||||
|
|
||||||
|
;; this session store won't store namespaced keywords correctly but stores results in a way
|
||||||
|
;; that applications in other languages can read. DO NOT use it with Friend.
|
||||||
|
|
||||||
|
(defrecord MongoDBSessionStore [^DB db ^String collection-name])
|
||||||
|
|
||||||
(extend-protocol ringstore/SessionStore
|
(extend-protocol ringstore/SessionStore
|
||||||
MongoDBSessionStore
|
MongoDBSessionStore
|
||||||
|
|
||||||
(read-session [store key]
|
(read-session [store key]
|
||||||
(if-let [m (and key
|
(if-let [m (and key
|
||||||
(mc/find-one-as-map (.collection-name store) {:_id key}))]
|
(mc/find-one-as-map (.db store) (.collection-name store) {:_id key}))]
|
||||||
m
|
m
|
||||||
{}))
|
{}))
|
||||||
|
|
||||||
(write-session [store key data]
|
(write-session [store key data]
|
||||||
(let [key (or key (str (UUID/randomUUID)))]
|
(let [key (or key (str (UUID/randomUUID)))]
|
||||||
(mc/save (.collection-name store) (assoc data :date (Date.) :_id key))
|
(mc/save (.db store) (.collection-name store) (assoc data :date (Date.) :_id key))
|
||||||
key))
|
key))
|
||||||
|
|
||||||
(delete-session [store key]
|
(delete-session [store key]
|
||||||
(mc/remove-by-id (.collection-name store) key)
|
(mc/remove-by-id (.db store) (.collection-name store) key)
|
||||||
nil))
|
nil))
|
||||||
|
|
||||||
|
|
||||||
(defn monger-store
|
(defn monger-store
|
||||||
([]
|
[^DB db ^String s]
|
||||||
(MongoDBSessionStore. default-session-store-collection))
|
(MongoDBSessionStore. db s))
|
||||||
([^String s]
|
|
||||||
(MongoDBSessionStore. s)))
|
|
||||||
|
|
|
||||||
|
|
@ -1,147 +0,0 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
|
||||||
;;
|
|
||||||
;; The use and distribution terms for this software are covered by the
|
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
|
||||||
;; the terms of this license.
|
|
||||||
;; You must not remove this notice, or any other, from this software.
|
|
||||||
|
|
||||||
(ns ^{:doc "Monger TestKit is an experiment that turned out to be partially successful but partially need to be
|
|
||||||
rethough, redesigned, integrated with MongoDB DB references and simply reimplemented from the ground up
|
|
||||||
one more time. For this exact reason, there is no documentation guide on it.
|
|
||||||
Please keep this in mind if you are considering using it."}
|
|
||||||
monger.testkit
|
|
||||||
(:require [monger.collection :as mc]
|
|
||||||
[monger.result :as mr])
|
|
||||||
(:use [monger.internal.fn :only (expand-all expand-all-with) :as fntools])
|
|
||||||
(:import org.bson.types.ObjectId))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; API
|
|
||||||
;;
|
|
||||||
|
|
||||||
(defmacro defcleaner
|
|
||||||
"Defines a fixture function that removes all documents from a collection. If collection is not specified,
|
|
||||||
a conventionally named var will be used. Supposed to be used with clojure.test/use-fixtures but may
|
|
||||||
be useful on its own.
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
(defcleaner events) ;; collection name will be taken from the events-collection var
|
|
||||||
(defcleaner people \"accounts\") ;; collection name is given
|
|
||||||
"
|
|
||||||
[entities & coll-name]
|
|
||||||
(let [coll-arg (if coll-name
|
|
||||||
(str (first coll-name))
|
|
||||||
(symbol (str entities "-collection")))
|
|
||||||
fn-name (symbol (str "purge-" entities))]
|
|
||||||
`(defn ~fn-name
|
|
||||||
[f#]
|
|
||||||
(mc/remove ~coll-arg)
|
|
||||||
(f#)
|
|
||||||
(mc/remove ~coll-arg))))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(def factories (atom {}))
|
|
||||||
(def defaults (atom {}))
|
|
||||||
(def last-oids (atom {}))
|
|
||||||
|
|
||||||
(defn defaults-for
|
|
||||||
[f-group & { :as attributes }]
|
|
||||||
(swap! defaults (fn [v]
|
|
||||||
(assoc v (name f-group) attributes))))
|
|
||||||
|
|
||||||
(defn factory
|
|
||||||
[f-group f-name & { :as attributes }]
|
|
||||||
(swap! factories (fn [a]
|
|
||||||
(assoc-in a [(name f-group) (name f-name)] attributes))))
|
|
||||||
|
|
||||||
|
|
||||||
(declare build seed remember-oid)
|
|
||||||
(defn- expand-associate-for-building
|
|
||||||
[f]
|
|
||||||
(let [mt (meta f)
|
|
||||||
[f-group f-name] (f)]
|
|
||||||
(:_id (build f-group f-name))))
|
|
||||||
|
|
||||||
(defn- expand-for-building
|
|
||||||
"Expands functions, treating those with association metadata (see `parent-id` for example) specially"
|
|
||||||
[f]
|
|
||||||
(let [mt (meta f)]
|
|
||||||
(if (:associate-gen mt)
|
|
||||||
(expand-associate-for-building f)
|
|
||||||
(f))))
|
|
||||||
|
|
||||||
(defn- expand-associate-for-seeding
|
|
||||||
[f]
|
|
||||||
(let [mt (meta f)
|
|
||||||
[f-group f-name] (f)]
|
|
||||||
(:_id (seed f-group f-name))))
|
|
||||||
|
|
||||||
(defn- expand-for-seeding
|
|
||||||
"Expands functions, treating those with association metadata (see `parent-id` for example) specially,
|
|
||||||
making sure parent documents are persisted first"
|
|
||||||
[f]
|
|
||||||
(let [mt (meta f)]
|
|
||||||
(if (:associate-gen mt)
|
|
||||||
(expand-associate-for-seeding f)
|
|
||||||
(f))))
|
|
||||||
|
|
||||||
(defn build
|
|
||||||
"Generates a new document and returns it.
|
|
||||||
Unless _id field is defined by the factory, it is generated."
|
|
||||||
[f-group f-name & { :as overrides }]
|
|
||||||
(let [d (@defaults (name f-group))
|
|
||||||
attributes (get-in @factories [(name f-group) (name f-name)])
|
|
||||||
merged (merge { :_id (ObjectId.) } d attributes overrides)]
|
|
||||||
(expand-all-with merged expand-for-building)))
|
|
||||||
|
|
||||||
(defn seed
|
|
||||||
"Generates and inserts a new document, then returns it.
|
|
||||||
Unless _id field is defined by the factory, it is generated."
|
|
||||||
[f-group f-name & { :as overrides }]
|
|
||||||
(io!
|
|
||||||
(let [d (@defaults (name f-group))
|
|
||||||
attributes (get-in @factories [(name f-group) (name f-name)])
|
|
||||||
merged (merge { :_id (ObjectId.) } d attributes overrides)
|
|
||||||
expanded (expand-all-with merged expand-for-seeding)]
|
|
||||||
(assert (mr/ok? (mc/insert f-group expanded)))
|
|
||||||
(remember-oid f-group f-name (:_id expanded))
|
|
||||||
expanded)))
|
|
||||||
|
|
||||||
(defn seed-all
|
|
||||||
"Seeds all fixtures in the given collection"
|
|
||||||
[f-group]
|
|
||||||
(io!
|
|
||||||
(let [xs (vec (keys (get @factories f-group)))]
|
|
||||||
(doseq [f-name xs]
|
|
||||||
(seed f-group f-name)))))
|
|
||||||
|
|
||||||
(defn embedded-doc
|
|
||||||
[f-group f-name & { :as overrides }]
|
|
||||||
(fn []
|
|
||||||
(apply build f-group f-name (flatten (vec overrides)))))
|
|
||||||
|
|
||||||
(defn parent-id
|
|
||||||
[f-group f-name]
|
|
||||||
(with-meta (fn []
|
|
||||||
[f-group f-name]) { :associate-gen true :parent-gen true }))
|
|
||||||
|
|
||||||
(defn- remember-oid
|
|
||||||
[f-group f-name oid]
|
|
||||||
(swap! last-oids (fn [a]
|
|
||||||
(assoc-in a [(name f-group) (name f-name)] oid))))
|
|
||||||
|
|
||||||
(defn last-oid-of
|
|
||||||
"Returns last object id of a document inserted using given factory"
|
|
||||||
[f-group f-name]
|
|
||||||
(get-in @last-oids [(name f-group) (name f-name)]))
|
|
||||||
|
|
||||||
|
|
||||||
(def ^{ :doc "Returns a new object id. Generates it if needed, otherwise returns a cached version.
|
|
||||||
Useful for defining referenced associations between fixture documents." }
|
|
||||||
memoized-oid (memoize (fn [f-group f-name]
|
|
||||||
(ObjectId.))))
|
|
||||||
|
|
@ -1,19 +1,45 @@
|
||||||
;; Copyright (c) 2011-2012 Michael S. Klishin
|
;; This source code is dual-licensed under the Apache License, version
|
||||||
|
;; 2.0, and the Eclipse Public License, version 1.0.
|
||||||
;;
|
;;
|
||||||
;; The use and distribution terms for this software are covered by the
|
;; The APL v2.0:
|
||||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
;;
|
||||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
;; ----------------------------------------------------------------------------------
|
||||||
;; By using this software in any fashion, you are agreeing to be bound by
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team
|
||||||
;; the terms of this license.
|
;;
|
||||||
;; You must not remove this notice, or any other, from this software.
|
;; Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
;; you may not use this file except in compliance with the License.
|
||||||
|
;; You may obtain a copy of the License at
|
||||||
|
;;
|
||||||
|
;; http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
;;
|
||||||
|
;; Unless required by applicable law or agreed to in writing, software
|
||||||
|
;; distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
;; See the License for the specific language governing permissions and
|
||||||
|
;; limitations under the License.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;;
|
||||||
|
;; The EPL v1.0:
|
||||||
|
;;
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
;; Copyright (c) 2011-2018 Michael S. Klishin, Alex Petrov, and the ClojureWerkz Team.
|
||||||
|
;; All rights reserved.
|
||||||
|
;;
|
||||||
|
;; This program and the accompanying materials are made available under the terms of
|
||||||
|
;; the Eclipse Public License Version 1.0,
|
||||||
|
;; which accompanies this distribution and is available at
|
||||||
|
;; http://www.eclipse.org/legal/epl-v10.html.
|
||||||
|
;; ----------------------------------------------------------------------------------
|
||||||
|
|
||||||
(ns ^{:doc "Provides various utility functions, primarily for working with document ids."} monger.util
|
(ns ^{:doc "Provides various utility functions, primarily for working with document ids."} monger.util
|
||||||
|
(:refer-clojure :exclude [random-uuid])
|
||||||
(:import java.security.SecureRandom
|
(:import java.security.SecureRandom
|
||||||
java.math.BigInteger
|
java.math.BigInteger
|
||||||
org.bson.types.ObjectId
|
org.bson.types.ObjectId
|
||||||
com.mongodb.DBObject
|
com.mongodb.DBObject
|
||||||
clojure.lang.IPersistentMap
|
clojure.lang.IPersistentMap
|
||||||
java.util.Map))
|
java.util.Map)
|
||||||
|
(:refer-clojure :exclude [random-uuid]))
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; API
|
;; API
|
||||||
|
|
@ -30,9 +56,11 @@
|
||||||
(.toString (new BigInteger n (SecureRandom.)) num-base))
|
(.toString (new BigInteger n (SecureRandom.)) num-base))
|
||||||
|
|
||||||
(defn ^ObjectId object-id
|
(defn ^ObjectId object-id
|
||||||
"Returns a new BSON object id"
|
"Returns a new BSON object id, or converts str to BSON object id"
|
||||||
[]
|
([]
|
||||||
(ObjectId.))
|
(ObjectId.))
|
||||||
|
([^String s]
|
||||||
|
(ObjectId. s)))
|
||||||
|
|
||||||
(defprotocol GetDocumentId
|
(defprotocol GetDocumentId
|
||||||
(get-id [input] "Returns document id"))
|
(get-id [input] "Returns document id"))
|
||||||
|
|
@ -47,3 +75,8 @@
|
||||||
(get-id
|
(get-id
|
||||||
[^IPersistentMap object]
|
[^IPersistentMap object]
|
||||||
(or (:_id object) (object "_id"))))
|
(or (:_id object) (object "_id"))))
|
||||||
|
|
||||||
|
(defn into-array-list
|
||||||
|
"Coerce a j.u.Collection into a j.u.ArrayList."
|
||||||
|
^java.util.ArrayList [^java.util.Collection coll]
|
||||||
|
(java.util.ArrayList. coll))
|
||||||
|
|
|
||||||
|
|
@ -1,44 +0,0 @@
|
||||||
package com.novemberain.monger;
|
|
||||||
|
|
||||||
import clojure.lang.IDeref;
|
|
||||||
import com.mongodb.DB;
|
|
||||||
import com.mongodb.DBObject;
|
|
||||||
import org.bson.BSONObject;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Exactly as com.mongodb.DBRef but also implements Clojure IDeref for @dereferencing
|
|
||||||
*/
|
|
||||||
public class DBRef extends com.mongodb.DBRef implements IDeref {
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Creates a DBRef
|
|
||||||
* @param db the database
|
|
||||||
* @param o a BSON object representing the reference
|
|
||||||
*/
|
|
||||||
public DBRef(DB db, BSONObject o) {
|
|
||||||
super(db , o.get("$ref").toString(), o.get("$id"));
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Creates a DBRef
|
|
||||||
* @param db the database
|
|
||||||
* @param ns the namespace where the object is stored
|
|
||||||
* @param id the object id
|
|
||||||
*/
|
|
||||||
public DBRef(DB db, String ns, Object id) {
|
|
||||||
super(db, ns, id);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Creates a DBRef from a com.mongodb.DBRef instance.
|
|
||||||
* @param source The original reference MongoDB Java driver uses
|
|
||||||
*/
|
|
||||||
public DBRef(com.mongodb.DBRef source) {
|
|
||||||
this(source.getDB(), source.getRef(), source.getId());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public DBObject deref() {
|
|
||||||
return this.fetch();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,69 +1,136 @@
|
||||||
(ns monger.test.aggregation-framework-test
|
(ns monger.test.aggregation-framework-test
|
||||||
(:require monger.core [monger.collection :as mc]
|
(:require [monger.core :as mg]
|
||||||
[monger.test.helper :as helper])
|
[monger.collection :as mc]
|
||||||
(:use clojure.test
|
[clojure.test :refer :all]
|
||||||
monger.operators
|
[monger.operators :refer :all]))
|
||||||
monger.test.fixtures))
|
|
||||||
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")
|
||||||
|
coll "docs"]
|
||||||
|
(defn purge-collections
|
||||||
|
[f]
|
||||||
|
(mc/purge-many db [coll])
|
||||||
|
(f)
|
||||||
|
(mc/purge-many db [coll]))
|
||||||
|
|
||||||
|
(use-fixtures :each purge-collections)
|
||||||
|
|
||||||
|
(deftest test-basic-single-stage-$project-aggregation-no-keywordize
|
||||||
|
(let [batch [{"state" "CA" "quantity" 1 "price" 199.00}
|
||||||
|
{"state" "NY" "quantity" 2 "price" 199.00}
|
||||||
|
{"state" "NY" "quantity" 1 "price" 299.00}
|
||||||
|
{"state" "IL" "quantity" 2 "price" 11.50 }
|
||||||
|
{"state" "CA" "quantity" 2 "price" 2.95 }
|
||||||
|
{"state" "IL" "quantity" 3 "price" 5.50 }]
|
||||||
|
expected #{{"quantity" 1 "state" "CA"}
|
||||||
|
{"quantity" 2 "state" "NY"}
|
||||||
|
{"quantity" 1 "state" "NY"}
|
||||||
|
{"quantity" 2 "state" "IL"}
|
||||||
|
{"quantity" 2 "state" "CA"}
|
||||||
|
{"quantity" 3 "state" "IL"}}]
|
||||||
|
(mc/insert-batch db coll batch)
|
||||||
|
(is (= 6 (mc/count db coll)))
|
||||||
|
(let [result (->>
|
||||||
|
(mc/aggregate db coll [{$project {"state" 1 "quantity" 1}}] :keywordize false)
|
||||||
|
(map #(select-keys % ["state" "quantity"]))
|
||||||
|
(set))]
|
||||||
|
(is (= expected result)))))
|
||||||
|
|
||||||
|
(deftest test-basic-single-stage-$project-aggregation
|
||||||
|
(let [batch [{:state "CA" :quantity 1 :price 199.00}
|
||||||
|
{:state "NY" :quantity 2 :price 199.00}
|
||||||
|
{:state "NY" :quantity 1 :price 299.00}
|
||||||
|
{:state "IL" :quantity 2 :price 11.50 }
|
||||||
|
{:state "CA" :quantity 2 :price 2.95 }
|
||||||
|
{:state "IL" :quantity 3 :price 5.50 }]
|
||||||
|
expected #{{:quantity 1 :state "CA"}
|
||||||
|
{:quantity 2 :state "NY"}
|
||||||
|
{:quantity 1 :state "NY"}
|
||||||
|
{:quantity 2 :state "IL"}
|
||||||
|
{:quantity 2 :state "CA"}
|
||||||
|
{:quantity 3 :state "IL"}}]
|
||||||
|
(mc/insert-batch db coll batch)
|
||||||
|
(is (= 6 (mc/count db coll)))
|
||||||
|
(let [result (set (map #(select-keys % [:state :quantity])
|
||||||
|
(mc/aggregate db coll [{$project {:state 1 :quantity 1}}])))]
|
||||||
|
(is (= expected result)))))
|
||||||
|
|
||||||
|
|
||||||
(helper/connect!)
|
(deftest test-basic-projection-with-multiplication
|
||||||
|
(let [batch [{:state "CA" :quantity 1 :price 199.00}
|
||||||
(use-fixtures :each purge-docs)
|
{:state "NY" :quantity 2 :price 199.00}
|
||||||
|
{:state "NY" :quantity 1 :price 299.00}
|
||||||
(deftest ^{:edge-features true} test-basic-single-stage-$project-aggregation
|
{:state "IL" :quantity 2 :price 11.50 }
|
||||||
(let [collection "docs"
|
{:state "CA" :quantity 2 :price 2.95 }
|
||||||
batch [{ :state "CA" :quantity 1 :price 199.00 }
|
{:state "IL" :quantity 3 :price 5.50 }]
|
||||||
{ :state "NY" :quantity 2 :price 199.00 }
|
expected #{{:_id "NY" :subtotal 398.0}
|
||||||
{ :state "NY" :quantity 1 :price 299.00 }
|
{:_id "NY" :subtotal 299.0}
|
||||||
{ :state "IL" :quantity 2 :price 11.50 }
|
{:_id "IL" :subtotal 23.0}
|
||||||
{ :state "CA" :quantity 2 :price 2.95 }
|
{:_id "CA" :subtotal 5.9}
|
||||||
{ :state "IL" :quantity 3 :price 5.50 }]
|
{:_id "IL" :subtotal 16.5}
|
||||||
expected [{:quantity 1 :state "CA"}
|
{:_id "CA" :subtotal 199.0}}]
|
||||||
{:quantity 2 :state "NY"}
|
(mc/insert-batch db coll batch)
|
||||||
{:quantity 1 :state "NY"}
|
(let [result (set (mc/aggregate db coll [{$project {:subtotal {$multiply ["$quantity", "$price"]}
|
||||||
{:quantity 2 :state "IL"}
|
:_id "$state"}}]))]
|
||||||
{:quantity 2 :state "CA"}
|
(is (= expected result)))))
|
||||||
{:quantity 3 :state "IL"}]]
|
|
||||||
(mc/insert-batch collection batch)
|
|
||||||
(is (= 6 (mc/count collection)))
|
|
||||||
(let [result (vec (map #(select-keys % [:state :quantity])
|
|
||||||
(mc/aggregate "docs" [{$project {:state 1 :quantity 1}}])))]
|
|
||||||
(is (= expected result)))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest ^{:edge-features true} test-basic-projection-with-multiplication
|
(deftest test-basic-total-aggregation
|
||||||
(let [collection "docs"
|
(let [batch [{:state "CA" :quantity 1 :price 199.00}
|
||||||
batch [{ :state "CA" :quantity 1 :price 199.00 }
|
{:state "NY" :quantity 2 :price 199.00}
|
||||||
{ :state "NY" :quantity 2 :price 199.00 }
|
{:state "NY" :quantity 1 :price 299.00}
|
||||||
{ :state "NY" :quantity 1 :price 299.00 }
|
{:state "IL" :quantity 2 :price 11.50 }
|
||||||
{ :state "IL" :quantity 2 :price 11.50 }
|
{:state "CA" :quantity 2 :price 2.95 }
|
||||||
{ :state "CA" :quantity 2 :price 2.95 }
|
{:state "IL" :quantity 3 :price 5.50 }]
|
||||||
{ :state "IL" :quantity 3 :price 5.50 }]
|
expected #{{:_id "CA" :total 204.9} {:_id "IL" :total 39.5} {:_id "NY" :total 697.0}}]
|
||||||
expected [{:_id "NY" :subtotal 398.0}
|
(mc/insert-batch db coll batch)
|
||||||
{:_id "NY" :subtotal 299.0}
|
(let [result (set (mc/aggregate db coll [{$project {:subtotal {$multiply ["$quantity", "$price"]}
|
||||||
{:_id "IL" :subtotal 23.0}
|
:_id 1
|
||||||
{:_id "CA" :subtotal 5.9}
|
:state 1}}
|
||||||
{:_id "IL" :subtotal 16.5}
|
{$group {:_id "$state"
|
||||||
{:_id "CA" :subtotal 199.0}]]
|
:total {$sum "$subtotal"}}}]))]
|
||||||
(mc/insert-batch collection batch)
|
(is (= expected result)))))
|
||||||
(let [result (vec (mc/aggregate "docs" [{$project {:subtotal {$multiply ["$quantity", "$price"]}
|
|
||||||
:_id "$state"}}]))]
|
|
||||||
(is (= expected result)))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest ^{:edge-features true} test-basic-total-aggregation
|
(deftest test-$first-aggregation-operator
|
||||||
(let [collection "docs"
|
(let [batch [{:state "CA"}
|
||||||
batch [{ :state "CA" :quantity 1 :price 199.00 }
|
{:state "IL"}]
|
||||||
{ :state "NY" :quantity 2 :price 199.00 }
|
expected "CA"]
|
||||||
{ :state "NY" :quantity 1 :price 299.00 }
|
(mc/insert-batch db coll batch)
|
||||||
{ :state "IL" :quantity 2 :price 11.50 }
|
(let [result (:state (first (mc/aggregate db coll [{$group {:_id 1 :state {$first "$state"}}}])))]
|
||||||
{ :state "CA" :quantity 2 :price 2.95 }
|
(is (= expected result)))))
|
||||||
{ :state "IL" :quantity 3 :price 5.50 }]
|
|
||||||
expected [{:_id "CA", :total 204.9} {:_id "IL", :total 39.5} {:_id "NY", :total 697.0}]]
|
(deftest test-$last-aggregation-operator
|
||||||
(mc/insert-batch collection batch)
|
(let [batch [{:state "CA"}
|
||||||
(let [result (vec (mc/aggregate "docs" [{$project {:subtotal {$multiply ["$quantity", "$price"]}
|
{:state "IL"}]
|
||||||
:_id 1
|
expected "IL"]
|
||||||
:state 1}}
|
(mc/insert-batch db coll batch)
|
||||||
{$group {:_id "$state"
|
(let [result (:state (first (mc/aggregate db coll [{$group {:_id 1 :state {$last "$state"}}}])))]
|
||||||
:total {$sum "$subtotal"}}}]))]
|
(is (= expected result)))))
|
||||||
(is (= expected result)))))
|
|
||||||
|
(deftest test-cursor-aggregation
|
||||||
|
(let [batch [{:state "CA" :quantity 1 :price 199.00}
|
||||||
|
{:state "NY" :quantity 2 :price 199.00}
|
||||||
|
{:state "NY" :quantity 1 :price 299.00}
|
||||||
|
{:state "IL" :quantity 2 :price 11.50 }
|
||||||
|
{:state "CA" :quantity 2 :price 2.95 }
|
||||||
|
{:state "IL" :quantity 3 :price 5.50 }]
|
||||||
|
expected #{{:quantity 1 :state "CA"}
|
||||||
|
{:quantity 2 :state "NY"}
|
||||||
|
{:quantity 1 :state "NY"}
|
||||||
|
{:quantity 2 :state "IL"}
|
||||||
|
{:quantity 2 :state "CA"}
|
||||||
|
{:quantity 3 :state "IL"}}]
|
||||||
|
(mc/insert-batch db coll batch)
|
||||||
|
(is (= 6 (mc/count db coll)))
|
||||||
|
(let [result (set (map #(select-keys % [:state :quantity])
|
||||||
|
(mc/aggregate db coll [{$project {:state 1 :quantity 1}}] :cursor {:batch-size 10})))]
|
||||||
|
(is (= expected result)))))
|
||||||
|
|
||||||
|
(deftest test-explain-aggregate
|
||||||
|
(let [batch [{:state "CA" :price 100}
|
||||||
|
{:state "CA" :price 10}
|
||||||
|
{:state "IL" :price 50}]]
|
||||||
|
(mc/insert-batch db coll batch)
|
||||||
|
(let [result (mc/explain-aggregate db coll [{$match {:state "CA"}}])]
|
||||||
|
(is (:ok result))))))
|
||||||
|
|
|
||||||
|
|
@ -1,348 +1,461 @@
|
||||||
(set! *warn-on-reflection* true)
|
|
||||||
|
|
||||||
(ns monger.test.atomic-modifiers-test
|
(ns monger.test.atomic-modifiers-test
|
||||||
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject CommandResult$CommandFailure]
|
(:import [com.mongodb WriteResult WriteConcern DBObject]
|
||||||
[org.bson.types ObjectId]
|
org.bson.types.ObjectId
|
||||||
[java.util Date])
|
java.util.Date)
|
||||||
(:require [monger core util]
|
(:require [monger.core :as mg]
|
||||||
[monger.collection :as mgcol]
|
[monger.collection :as mc]
|
||||||
[monger.result :as mgres]
|
[monger.result :refer [acknowledged?]]
|
||||||
[monger.test.helper :as helper])
|
[clojure.test :refer :all]
|
||||||
(:use [clojure.test]
|
[monger.operators :refer :all]))
|
||||||
[monger.operators]
|
|
||||||
[monger.test.fixtures]))
|
|
||||||
|
(let [conn (mg/connect)
|
||||||
(helper/connect!)
|
db (mg/get-db conn "monger-test")]
|
||||||
|
|
||||||
(use-fixtures :each purge-docs purge-things purge-scores)
|
(defn purge-collections
|
||||||
|
[f]
|
||||||
|
(mc/remove db "docs")
|
||||||
;;
|
(mc/remove db "things")
|
||||||
;; $inc
|
(mc/remove db "scores")
|
||||||
;;
|
(f)
|
||||||
|
(mc/remove db "docs")
|
||||||
(deftest increment-a-single-existing-field-using-$inc-modifier
|
(mc/remove db "things")
|
||||||
(let [coll "scores"
|
(mc/remove db "scores"))
|
||||||
oid (ObjectId.)]
|
|
||||||
(mgcol/insert coll { :_id oid :username "l33r0y" :score 100 })
|
(use-fixtures :each purge-collections)
|
||||||
(mgcol/update coll { :_id oid } { $inc { :score 20 } })
|
|
||||||
(is (= 120 (:score (mgcol/find-map-by-id coll oid))))))
|
;;
|
||||||
|
;; $inc
|
||||||
(deftest set-a-single-non-existing-field-using-$inc-modifier
|
;;
|
||||||
(let [coll "scores"
|
|
||||||
oid (ObjectId.)]
|
(deftest increment-a-single-existing-field-using-$inc-modifier
|
||||||
(mgcol/insert coll { :_id oid :username "l33r0y" })
|
(let [coll "scores"
|
||||||
(mgcol/update coll { :_id oid } { $inc { :score 30 } })
|
oid (ObjectId.)]
|
||||||
(is (= 30 (:score (mgcol/find-map-by-id coll oid))))))
|
(mc/insert db coll {:_id oid :username "l33r0y" :score 100})
|
||||||
|
(mc/update db coll {:_id oid} {$inc {:score 20}})
|
||||||
|
(is (= 120 (:score (mc/find-map-by-id db coll oid))))))
|
||||||
(deftest increment-multiple-existing-fields-using-$inc-modifier
|
|
||||||
(let [coll "scores"
|
(deftest set-a-single-non-existing-field-using-$inc-modifier
|
||||||
oid (ObjectId.)]
|
(let [coll "scores"
|
||||||
(mgcol/insert coll { :_id oid :username "l33r0y" :score 100 :bonus 0 })
|
oid (ObjectId.)]
|
||||||
(mgcol/update coll { :_id oid } {$inc { :score 20 :bonus 10 } })
|
(mc/insert db coll {:_id oid :username "l33r0y"})
|
||||||
(is (= { :_id oid :score 120 :bonus 10 :username "l33r0y" } (mgcol/find-map-by-id coll oid)))))
|
(mc/update db coll {:_id oid} {$inc {:score 30}})
|
||||||
|
(is (= 30 (:score (mc/find-map-by-id db coll oid))))))
|
||||||
|
|
||||||
(deftest increment-and-set-multiple-existing-fields-using-$inc-modifier
|
|
||||||
(let [coll "scores"
|
(deftest increment-multiple-existing-fields-using-$inc-modifier
|
||||||
oid (ObjectId.)]
|
(let [coll "scores"
|
||||||
(mgcol/insert coll { :_id oid :username "l33r0y" :score 100 })
|
oid (ObjectId.)]
|
||||||
(mgcol/update coll { :_id oid } { $inc { :score 20 :bonus 10 } })
|
(mc/insert db coll {:_id oid :username "l33r0y" :score 100 :bonus 0})
|
||||||
(is (= { :_id oid :score 120 :bonus 10 :username "l33r0y" } (mgcol/find-map-by-id coll oid)))))
|
(mc/update db coll {:_id oid} {$inc {:score 20 :bonus 10}})
|
||||||
|
(is (= {:_id oid :score 120 :bonus 10 :username "l33r0y"}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
;;
|
|
||||||
;; $set
|
(deftest increment-and-set-multiple-existing-fields-using-$inc-modifier
|
||||||
;;
|
(let [coll "scores"
|
||||||
|
oid (ObjectId.)]
|
||||||
(deftest update-a-single-existing-field-using-$set-modifier
|
(mc/insert db coll {:_id oid :username "l33r0y" :score 100})
|
||||||
(let [coll "things"
|
(mc/update db coll {:_id oid} {$inc {:score 20 :bonus 10}})
|
||||||
oid (ObjectId.)]
|
(is (= {:_id oid :score 120 :bonus 10 :username "l33r0y"}
|
||||||
(mgcol/insert coll { :_id oid :weight 10.0 })
|
(mc/find-map-by-id db coll oid)))))
|
||||||
(mgcol/update coll { :_id oid } { $set { :weight 20.5 } })
|
|
||||||
(is (= 20.5 (:weight (mgcol/find-map-by-id coll oid [:weight]))))))
|
|
||||||
|
|
||||||
(deftest set-a-single-non-existing-field-using-$set-modifier
|
;;
|
||||||
(let [coll "things"
|
;; $set
|
||||||
oid (ObjectId.)]
|
;;
|
||||||
(mgcol/insert coll { :_id oid :weight 10.0 })
|
|
||||||
(mgcol/update coll { :_id oid } { $set { :height 17.2 } })
|
(deftest update-a-single-existing-field-using-$set-modifier
|
||||||
(is (= 17.2 (:height (mgcol/find-map-by-id coll oid [:height]))))))
|
(let [coll "things"
|
||||||
|
oid (ObjectId.)]
|
||||||
(deftest update-multiple-existing-fields-using-$set-modifier
|
(mc/insert db coll {:_id oid :weight 10.0})
|
||||||
(let [coll "things"
|
(mc/update db coll {:_id oid} {$set {:weight 20.5}})
|
||||||
oid (ObjectId.)]
|
(is (= 20.5 (:weight (mc/find-map-by-id db coll oid [:weight]))))))
|
||||||
(mgcol/insert coll { :_id oid :weight 10.0 :height 15.2 })
|
|
||||||
(mgcol/update coll { :_id oid } { $set { :weight 20.5 :height 25.6 } })
|
(deftest set-a-single-non-existing-field-using-$set-modifier
|
||||||
(is (= { :_id oid :weight 20.5 :height 25.6 } (mgcol/find-map-by-id coll oid [:weight :height])))))
|
(let [coll "things"
|
||||||
|
oid (ObjectId.)]
|
||||||
|
(mc/insert db coll {:_id oid :weight 10.0})
|
||||||
(deftest update-and-set-multiple-fields-using-$set-modifier
|
(mc/update db coll {:_id oid} {$set {:height 17.2}})
|
||||||
(let [coll "things"
|
(is (= 17.2 (:height (mc/find-map-by-id db coll oid [:height]))))))
|
||||||
oid (ObjectId.)]
|
|
||||||
(mgcol/insert coll { :_id oid :weight 10.0 })
|
(deftest update-multiple-existing-fields-using-$set-modifier
|
||||||
(mgcol/update coll { :_id oid } {$set { :weight 20.5 :height 25.6 } })
|
(let [coll "things"
|
||||||
(is (= { :_id oid :weight 20.5 :height 25.6 } (mgcol/find-map-by-id coll oid [:weight :height])))))
|
oid (ObjectId.)]
|
||||||
|
(mc/insert db coll {:_id oid :weight 10.0 :height 15.2})
|
||||||
|
(mc/update db coll {:_id oid} {$set {:weight 20.5 :height 25.6}})
|
||||||
;;
|
(is (= {:_id oid :weight 20.5 :height 25.6}
|
||||||
;; $unset
|
(mc/find-map-by-id db coll oid [:weight :height])))))
|
||||||
;;
|
|
||||||
|
|
||||||
(deftest unset-a-single-existing-field-using-$unset-modifier
|
(deftest update-and-set-multiple-fields-using-$set-modifier
|
||||||
(let [coll "docs"
|
(let [coll "things"
|
||||||
oid (ObjectId.)]
|
oid (ObjectId.)]
|
||||||
(mgcol/insert coll { :_id oid :title "Document 1" :published true })
|
(mc/insert db coll {:_id oid :weight 10.0})
|
||||||
(mgcol/update coll { :_id oid } { $unset { :published 1 } })
|
(mc/update db coll {:_id oid} {$set {:weight 20.5 :height 25.6}})
|
||||||
(is (= { :_id oid :title "Document 1" } (mgcol/find-map-by-id coll oid)))))
|
(is (= {:_id oid :weight 20.5 :height 25.6}
|
||||||
|
(mc/find-map-by-id db coll oid [:weight :height])))))
|
||||||
|
|
||||||
(deftest unset-multiple-existing-fields-using-$unset-modifier
|
|
||||||
(let [coll "docs"
|
;;
|
||||||
oid (ObjectId.)]
|
;; $unset
|
||||||
(mgcol/insert coll { :_id oid :title "Document 1" :published true :featured true })
|
;;
|
||||||
(mgcol/update coll { :_id oid } { $unset { :published 1 :featured true } })
|
|
||||||
(is (= { :_id oid :title "Document 1" } (mgcol/find-map-by-id coll oid)))))
|
(deftest unset-a-single-existing-field-using-$unset-modifier
|
||||||
|
|
||||||
|
|
||||||
(deftest unsetting-an-unexisting-field-using-$unset-modifier-is-not-considered-an-issue
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)]
|
|
||||||
(mgcol/insert coll { :_id oid :title "Document 1" :published true })
|
|
||||||
(is (mgres/ok? (mgcol/update coll { :_id oid } { $unset { :published 1 :featured true } })))
|
|
||||||
(is (= { :_id oid :title "Document 1" } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; $push
|
|
||||||
;;
|
|
||||||
|
|
||||||
(deftest initialize-an-array-using-$push-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$push modifier appends value to field"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title })
|
|
||||||
(mgcol/update coll { :_id oid } { $push { :tags "modifiers" } })
|
|
||||||
(is (= { :_id oid :title title :tags ["modifiers"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
(deftest add-value-to-an-existing-array-using-$push-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$push modifier appends value to field"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :tags ["mongodb"] })
|
|
||||||
(mgcol/update coll { :_id oid } { $push { :tags "modifiers" } })
|
|
||||||
(is (= { :_id oid :title title :tags ["mongodb" "modifiers"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
|
|
||||||
;; this is a common mistake, I leave it here to demonstrate it. You almost never
|
|
||||||
;; actually want to do this! What you really want is to use $pushAll instead of $push. MK.
|
|
||||||
(deftest add-array-value-to-an-existing-array-using-$push-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$push modifier appends value to field"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :tags ["mongodb"] })
|
|
||||||
(mgcol/update coll { :_id oid } { $push { :tags ["modifiers" "operators"] } })
|
|
||||||
(is (= { :_id oid :title title :tags ["mongodb" ["modifiers" "operators"]] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(deftest double-add-value-to-an-existing-array-using-$push-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$push modifier appends value to field"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :tags ["mongodb"] })
|
|
||||||
(mgcol/update coll { :_id oid } { $push { :tags "modifiers" } })
|
|
||||||
(mgcol/update coll { :_id oid } { $push { :tags "modifiers" } })
|
|
||||||
(is (= { :_id oid :title title :tags ["mongodb" "modifiers" "modifiers"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; $pushAll
|
|
||||||
;;
|
|
||||||
|
|
||||||
(deftest initialize-an-array-using-$pushAll-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$pushAll modifier appends multiple values to field"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title })
|
|
||||||
(mgcol/update coll { :_id oid } { $pushAll { :tags ["mongodb" "docs"] } })
|
|
||||||
(is (= { :_id oid :title title :tags ["mongodb" "docs"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
(deftest add-value-to-an-existing-array-using-$pushAll-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$pushAll modifier appends multiple values to field"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :tags ["mongodb"] })
|
|
||||||
(mgcol/update coll { :_id oid } { $pushAll { :tags ["modifiers" "docs"] } })
|
|
||||||
(is (= { :_id oid :title title :tags ["mongodb" "modifiers" "docs"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest double-add-value-to-an-existing-array-using-$pushAll-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$pushAll modifier appends multiple values to field"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :tags ["mongodb" "docs"] })
|
|
||||||
(mgcol/update coll { :_id oid } { $pushAll { :tags ["modifiers" "docs"] } })
|
|
||||||
(is (= { :_id oid :title title :tags ["mongodb" "docs" "modifiers" "docs"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; $addToSet
|
|
||||||
;;
|
|
||||||
|
|
||||||
(deftest initialize-an-array-using-$addToSet-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$addToSet modifier appends value to field unless it is already there"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title })
|
|
||||||
(mgcol/update coll { :_id oid } { $addToSet { :tags "modifiers" } })
|
|
||||||
(is (= { :_id oid :title title :tags ["modifiers"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
(deftest add-value-to-an-existing-array-using-$addToSet-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$addToSet modifier appends value to field unless it is already there"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :tags ["mongodb"] })
|
|
||||||
(mgcol/update coll { :_id oid } { $addToSet { :tags "modifiers" } })
|
|
||||||
(is (= { :_id oid :title title :tags ["mongodb" "modifiers"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest double-add-value-to-an-existing-array-using-$addToSet-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$addToSet modifier appends value to field unless it is already there"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :tags ["mongodb"] })
|
|
||||||
(mgcol/update coll { :_id oid } { $addToSet { :tags "modifiers" } })
|
|
||||||
(mgcol/update coll { :_id oid } { $addToSet { :tags "modifiers" } })
|
|
||||||
(is (= { :_id oid :title title :tags ["mongodb" "modifiers"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; $pop
|
|
||||||
;;
|
|
||||||
|
|
||||||
(deftest pop-last-value-in-the-array-using-$pop-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$pop modifier removes last or first value in the array"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :tags ["products" "apple" "reviews"] })
|
|
||||||
(mgcol/update coll { :_id oid } { $pop { :tags 1 } })
|
|
||||||
(is (= { :_id oid :title title :tags ["products" "apple"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
(deftest unshift-first-value-in-the-array-using-$pop-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$pop modifier removes last or first value in the array"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :tags ["products" "apple" "reviews"] })
|
|
||||||
(mgcol/update coll { :_id oid } { $pop { :tags -1 } })
|
|
||||||
(is (= { :_id oid :title title :tags ["apple" "reviews"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
(deftest pop-last-values-from-multiple-arrays-using-$pop-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$pop modifier removes last or first value in the array"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :tags ["products" "apple" "reviews"] :categories ["apple" "reviews" "drafts"] })
|
|
||||||
(mgcol/update coll { :_id oid } { $pop { :tags 1 :categories 1 } })
|
|
||||||
(is (= { :_id oid :title title :tags ["products" "apple"] :categories ["apple" "reviews"] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; $pull
|
|
||||||
;;
|
|
||||||
|
|
||||||
(deftest remove-all-value-entries-from-array-using-$pull-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$pull modifier removes all value entries in the array"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :measurements [1.0 1.2 1.2 1.2 1.1 1.1 1.2 1.3 1.0] })
|
|
||||||
(mgcol/update coll { :_id oid } { $pull { :measurements 1.2 } })
|
|
||||||
(is (= { :_id oid :title title :measurements [1.0 1.1 1.1 1.3 1.0] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
(deftest remove-all-value-entries-from-array-using-$pull-modifier-based-on-a-condition
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$pull modifier removes all value entries in the array"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :measurements [1.0 1.2 1.2 1.2 1.1 1.1 1.2 1.3 1.0] })
|
|
||||||
(mgcol/update coll { :_id oid } { $pull { :measurements { $gte 1.2 } } })
|
|
||||||
(is (= { :_id oid :title title :measurements [1.0 1.1 1.1 1.0] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
;;
|
|
||||||
;; $pullAll
|
|
||||||
;;
|
|
||||||
|
|
||||||
(deftest remove-all-value-entries-from-array-using-$pullAll-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$pullAll modifier removes entries of multiple values in the array"]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :measurements [1.0 1.2 1.2 1.2 1.1 1.1 1.2 1.3 1.0] })
|
|
||||||
(mgcol/update coll { :_id oid } { $pullAll { :measurements [1.0 1.1 1.2] } })
|
|
||||||
(is (= { :_id oid :title title :measurements [1.3] } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; $rename
|
|
||||||
;;
|
|
||||||
|
|
||||||
(deftest rename-a-single-field-using-$rename-modifier
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
title "$rename renames fields"
|
|
||||||
v [1.0 1.2 1.2 1.2 1.1 1.1 1.2 1.3 1.0]]
|
|
||||||
(mgcol/insert coll { :_id oid :title title :measurements v })
|
|
||||||
(mgcol/update coll { :_id oid } { $rename { :measurements "results" } })
|
|
||||||
(is (= { :_id oid :title title :results v } (mgcol/find-map-by-id coll oid)))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; find-and-modify
|
|
||||||
;;
|
|
||||||
|
|
||||||
(deftest find-and-modify-a-single-document
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
doc {:_id oid :name "Sophie Bangs" :level 42}
|
|
||||||
conditions {:name "Sophie Bangs"}
|
|
||||||
update {$inc {:level 1}}]
|
|
||||||
(mgcol/insert coll doc)
|
|
||||||
(let [res (mgcol/find-and-modify coll conditions update :return-new true)]
|
|
||||||
(is (= (select-keys res [:name :level]) {:name "Sophie Bangs" :level 43})))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest find-and-modify-remove-a-document
|
|
||||||
(let [coll "docs"
|
|
||||||
oid (ObjectId.)
|
|
||||||
doc {:_id oid :name "Sophie Bangs" :level 42}
|
|
||||||
conditions {:name "Sophie Bangs"}]
|
|
||||||
(mgcol/insert coll doc)
|
|
||||||
(let [res (mgcol/find-and-modify coll conditions {} :remove true)]
|
|
||||||
(is (= (select-keys res [:name :level]) {:name "Sophie Bangs" :level 42}))
|
|
||||||
(is (empty? (mgcol/find-maps coll conditions))))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest find-and-modify-upsert-a-document
|
|
||||||
(testing "case 1"
|
|
||||||
(let [coll "docs"
|
(let [coll "docs"
|
||||||
oid (ObjectId.)
|
oid (ObjectId.)]
|
||||||
doc {:_id oid :name "Sophie Bangs" :level 42}]
|
(mc/insert db coll {:_id oid :title "Document 1" :published true})
|
||||||
(let [res (mgcol/find-and-modify coll doc doc :upsert true)]
|
(mc/update db coll {:_id oid} {$unset {:published 1}})
|
||||||
(is (empty? res))
|
(is (= {:_id oid :title "Document 1"}
|
||||||
(is (select-keys (mgcol/find-map-by-id coll oid) [:name :level]) (dissoc doc :_id)))))
|
(mc/find-map-by-id db coll oid)))))
|
||||||
(testing "case 2"
|
|
||||||
|
|
||||||
|
(deftest unset-multiple-existing-fields-using-$unset-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)]
|
||||||
|
(mc/insert db coll {:_id oid :title "Document 1" :published true :featured true})
|
||||||
|
(mc/update db coll {:_id oid} {$unset {:published 1 :featured true}})
|
||||||
|
(is (= {:_id oid :title "Document 1"}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
|
||||||
|
(deftest unsetting-an-unexisting-field-using-$unset-modifier-is-not-considered-an-issue
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)]
|
||||||
|
(mc/insert db coll {:_id oid :title "Document 1" :published true})
|
||||||
|
(is (acknowledged? (mc/update db coll {:_id oid} {$unset {:published 1 :featured true}})))
|
||||||
|
(is (= {:_id oid :title "Document 1"}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; $setOnInsert
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest setOnInsert-in-upsert-for-non-existing-document
|
||||||
|
(let [coll "docs"
|
||||||
|
now 456
|
||||||
|
oid (ObjectId.)]
|
||||||
|
(mc/find-and-modify db coll {:_id oid} {$set {:lastseen now} $setOnInsert {:firstseen now}} {:upsert true})
|
||||||
|
(is (= {:_id oid :lastseen now :firstseen now}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
(deftest setOnInsert-in-upsert-for-existing-document
|
||||||
|
(let [coll "docs"
|
||||||
|
before 123
|
||||||
|
now 456
|
||||||
|
oid (ObjectId.)]
|
||||||
|
(mc/insert db coll {:_id oid :firstseen before :lastseen before})
|
||||||
|
(mc/find-and-modify db coll {:_id oid} {$set {:lastseen now} $setOnInsert {:firstseen now}} {:upsert true})
|
||||||
|
(is (= {:_id oid :lastseen now :firstseen before}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; $push
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest initialize-an-array-using-$push-modifier
|
||||||
(let [coll "docs"
|
(let [coll "docs"
|
||||||
query {:name "Sophie Bangs"}
|
oid (ObjectId.)
|
||||||
doc (merge query {:level 42})]
|
title "$push modifier appends value to field"]
|
||||||
(let [res (mgcol/find-and-modify coll query doc :upsert true :return-new true)]
|
(mc/insert db coll {:_id oid :title title})
|
||||||
(is (:_id res))
|
(mc/update db coll {:_id oid} {$push {:tags "modifiers"}})
|
||||||
(is (select-keys (mgcol/find-map-by-id coll (:_id res)) [:name :level]) doc)))))
|
(is (= {:_id oid :title title :tags ["modifiers"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
(deftest add-value-to-an-existing-array-using-$push-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$push modifier appends value to field"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["mongodb"]})
|
||||||
|
(mc/update db coll {:_id oid} {$push {:tags "modifiers"}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "modifiers"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
|
||||||
(deftest find-and-modify-after-sort
|
;; this is a common mistake, I leave it here to demonstrate it. You almost never
|
||||||
(let [coll "docs"
|
;; actually want to do this! What you really want is to use $push with $each instead of $push. MK.
|
||||||
oid (ObjectId.)
|
(deftest add-array-value-to-an-existing-array-using-$push-modifier
|
||||||
oid2 (ObjectId.)
|
(let [coll "docs"
|
||||||
doc {:name "Sophie Bangs"}
|
oid (ObjectId.)
|
||||||
doc1 (assoc doc :_id oid :level 42)
|
title "$push modifier appends value to field"]
|
||||||
doc2 (assoc doc :_id oid2 :level 0)]
|
(mc/insert db coll {:_id oid :title title :tags ["mongodb"]})
|
||||||
(mgcol/insert-batch coll [doc1 doc2])
|
(mc/update db coll {:_id oid} {$push {:tags ["modifiers" "operators"]}})
|
||||||
(let [res (mgcol/find-and-modify coll doc {$inc {:level 1}} :sort {:level -1})]
|
(is (= {:_id oid :title title :tags ["mongodb" ["modifiers" "operators"]]}
|
||||||
(is (= (select-keys res [:name :level]) {:name "Sophie Bangs" :level 42})))))
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
(deftest double-add-value-to-an-existing-array-using-$push-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$push modifier appends value to field"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["mongodb"]})
|
||||||
|
(mc/update db coll {:_id oid} {$push {:tags "modifiers"}})
|
||||||
|
(mc/update db coll {:_id oid} {$push {:tags "modifiers"}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "modifiers" "modifiers"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; $push $each
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest initialize-an-array-using-$push-$each-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$push with $each modifier appends multiple values to field"]
|
||||||
|
(mc/insert db coll {:_id oid :title title})
|
||||||
|
(mc/update db coll {:_id oid} {$push {:tags {$each ["mongodb" "docs"]}}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "docs"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
(deftest add-values-to-an-existing-array-using-$push-$each-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$push with $each modifier appends multiple values to field"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["mongodb"]})
|
||||||
|
(mc/update db coll {:_id oid} {$push {:tags {$each ["modifiers" "docs"]}}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "modifiers" "docs"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
(deftest double-add-value-to-an-existing-array-using-$push-$each-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$push with $each modifier appends multiple values to field"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["mongodb" "docs"]})
|
||||||
|
(mc/update db coll {:_id oid} {$push {:tags {$each ["modifiers" "docs"]}}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "docs" "modifiers" "docs"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; $push + $each (formerly $pushAll)
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest initialize-an-array-using-$push-and-$each-modifiers
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$pushAll modifier appends multiple values to field"]
|
||||||
|
(mc/insert db coll {:_id oid :title title})
|
||||||
|
(mc/update db coll {:_id oid} {$push {:tags {$each ["mongodb" "docs"]}}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "docs"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
(deftest add-value-to-an-existing-array-using-$push-and-$each-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$pushAll modifier appends multiple values to field"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["mongodb"]})
|
||||||
|
(mc/update db coll {:_id oid} {$push {:tags {$each ["modifiers" "docs"]}}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "modifiers" "docs"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
|
||||||
|
(deftest double-add-value-to-an-existing-array-using-$push-and-$each-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$pushAll modifier appends multiple values to field"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["mongodb" "docs"]})
|
||||||
|
(mc/update db coll {:_id oid} {$push {:tags {$each ["modifiers" "docs"]}}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "docs" "modifiers" "docs"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; $addToSet
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest initialize-an-array-using-$addToSet-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$addToSet modifier appends value to field unless it is already there"]
|
||||||
|
(mc/insert db coll {:_id oid :title title})
|
||||||
|
(mc/update db coll {:_id oid} {$addToSet {:tags "modifiers"}})
|
||||||
|
(is (= {:_id oid :title title :tags ["modifiers"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
(deftest add-value-to-an-existing-array-using-$addToSet-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$addToSet modifier appends value to field unless it is already there"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["mongodb"]})
|
||||||
|
(mc/update db coll {:_id oid} {$addToSet {:tags "modifiers"}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "modifiers"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
|
||||||
|
(deftest double-add-value-to-an-existing-array-using-$addToSet-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$addToSet modifier appends value to field unless it is already there"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["mongodb"]})
|
||||||
|
(mc/update db coll {:_id oid} {$addToSet {:tags "modifiers"}})
|
||||||
|
(mc/update db coll {:_id oid} {$addToSet {:tags "modifiers"}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "modifiers"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; $addToSet $each
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest initialize-an-array-using-$addToSet-$each-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$addToSet with $each modifier appends multiple values to field unless they are already there"]
|
||||||
|
(mc/insert db coll {:_id oid :title title})
|
||||||
|
(mc/update db coll {:_id oid} {$addToSet {:tags {$each ["mongodb" "docs"]}}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "docs"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
(deftest add-values-to-an-existing-array-using-$addToSet-$each-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$addToSet with $each modifier appends multiple values to field unless they are already there"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["mongodb"]})
|
||||||
|
(mc/update db coll {:_id oid} {$addToSet {:tags {$each ["modifiers" "docs"]}}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "modifiers" "docs"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
(deftest double-add-value-to-an-existing-array-using-$addToSet-$each-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$addToSet with $each modifier appends multiple values to field unless they are already there"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["mongodb" "docs"]})
|
||||||
|
(mc/update db coll {:_id oid} {$addToSet {:tags {$each ["modifiers" "docs" "operators"]}}})
|
||||||
|
(is (= {:_id oid :title title :tags ["mongodb" "docs" "modifiers" "operators"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; $pop
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest pop-last-value-in-the-array-using-$pop-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$pop modifier removes last or first value in the array"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["products" "apple" "reviews"]})
|
||||||
|
(mc/update db coll {:_id oid} {$pop {:tags 1}})
|
||||||
|
(is (= {:_id oid :title title :tags ["products" "apple"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
(deftest unshift-first-value-in-the-array-using-$pop-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$pop modifier removes last or first value in the array"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["products" "apple" "reviews"]})
|
||||||
|
(mc/update db coll {:_id oid} {$pop {:tags -1}})
|
||||||
|
(is (= {:_id oid :title title :tags ["apple" "reviews"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
(deftest pop-last-values-from-multiple-arrays-using-$pop-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$pop modifier removes last or first value in the array"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :tags ["products" "apple" "reviews"] :categories ["apple" "reviews" "drafts"]})
|
||||||
|
(mc/update db coll {:_id oid} {$pop {:tags 1 :categories 1}})
|
||||||
|
(is (= {:_id oid :title title :tags ["products" "apple"] :categories ["apple" "reviews"]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; $pull
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest remove-all-value-entries-from-array-using-$pull-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$pull modifier removes all value entries in the array"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :measurements [1.0 1.2 1.2 1.2 1.1 1.1 1.2 1.3 1.0]})
|
||||||
|
(mc/update db coll {:_id oid} {$pull {:measurements 1.2}})
|
||||||
|
(is (= {:_id oid :title title :measurements [1.0 1.1 1.1 1.3 1.0]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
(deftest remove-all-value-entries-from-array-using-$pull-modifier-based-on-a-condition
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$pull modifier removes all value entries in the array"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :measurements [1.0 1.2 1.2 1.2 1.1 1.1 1.2 1.3 1.0]})
|
||||||
|
(mc/update db coll {:_id oid} {$pull {:measurements {$gte 1.2}}})
|
||||||
|
(is (= {:_id oid :title title :measurements [1.0 1.1 1.1 1.0]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
;;
|
||||||
|
;; $pullAll
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest remove-all-value-entries-from-array-using-$pullAll-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$pullAll modifier removes entries of multiple values in the array"]
|
||||||
|
(mc/insert db coll {:_id oid :title title :measurements [1.0 1.2 1.2 1.2 1.1 1.1 1.2 1.3 1.0]})
|
||||||
|
(mc/update db coll {:_id oid} {$pullAll {:measurements [1.0 1.1 1.2]}})
|
||||||
|
(is (= {:_id oid :title title :measurements [1.3]}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; $rename
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest rename-a-single-field-using-$rename-modifier
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
title "$rename renames fields"
|
||||||
|
v [1.0 1.2 1.2 1.2 1.1 1.1 1.2 1.3 1.0]]
|
||||||
|
(mc/insert db coll {:_id oid :title title :measurements v})
|
||||||
|
(mc/update db coll {:_id oid} {$rename {:measurements "results"}})
|
||||||
|
(is (= {:_id oid :title title :results v}
|
||||||
|
(mc/find-map-by-id db coll oid)))))
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; find-and-modify
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest find-and-modify-a-single-document
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
doc {:_id oid :name "Sophie Bangs" :level 42}
|
||||||
|
conditions {:name "Sophie Bangs"}
|
||||||
|
update {$inc {:level 1}}]
|
||||||
|
(mc/insert db coll doc)
|
||||||
|
(let [res (mc/find-and-modify db coll conditions update {:return-new true})]
|
||||||
|
(is (= (select-keys res [:name :level]) {:name "Sophie Bangs" :level 43})))))
|
||||||
|
|
||||||
|
|
||||||
|
(deftest find-and-modify-remove-a-document
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
doc {:_id oid :name "Sophie Bangs" :level 42}
|
||||||
|
conditions {:name "Sophie Bangs"}]
|
||||||
|
(mc/insert db coll doc)
|
||||||
|
(let [res (mc/find-and-modify db coll conditions {} {:remove true})]
|
||||||
|
(is (= (select-keys res [:name :level]) {:name "Sophie Bangs" :level 42}))
|
||||||
|
(is (empty? (mc/find-maps db coll conditions))))))
|
||||||
|
|
||||||
|
|
||||||
|
(deftest find-and-modify-upsert-a-document
|
||||||
|
(testing "case 1"
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
doc {:_id oid :name "Sophie Bangs" :level 42}]
|
||||||
|
(let [res (mc/find-and-modify db coll doc doc {:upsert true})]
|
||||||
|
(is (empty? res))
|
||||||
|
(is (select-keys (mc/find-map-by-id db coll oid) [:name :level]) (dissoc doc :_id)))))
|
||||||
|
(testing "case 2"
|
||||||
|
(let [coll "docs"
|
||||||
|
query {:name "Sophie Bangs"}
|
||||||
|
doc (merge query {:level 42})]
|
||||||
|
(let [res (mc/find-and-modify db coll query doc {:upsert true :return-new true})]
|
||||||
|
(is (:_id res))
|
||||||
|
(is (select-keys (mc/find-map-by-id db coll (:_id res)) [:name :level]) doc)))))
|
||||||
|
|
||||||
|
|
||||||
|
(deftest find-and-modify-after-sort
|
||||||
|
(let [coll "docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
oid2 (ObjectId.)
|
||||||
|
doc {:name "Sophie Bangs"}
|
||||||
|
doc1 (assoc doc :_id oid :level 42)
|
||||||
|
doc2 (assoc doc :_id oid2 :level 0)]
|
||||||
|
(mc/insert-batch db coll [doc1 doc2])
|
||||||
|
(let [res (mc/find-and-modify db coll doc {$inc {:level 1}} {:sort {:level -1}})]
|
||||||
|
(is (= (select-keys res [:name :level]) {:name "Sophie Bangs" :level 42}))))))
|
||||||
|
|
|
||||||
|
|
@ -1,19 +1,42 @@
|
||||||
(ns monger.test.authentication-test
|
(ns monger.test.authentication-test
|
||||||
(:require [monger core util db]
|
(:require [monger util db]
|
||||||
[monger.test.helper :as helper])
|
[monger.credentials :as mcr]
|
||||||
(:use [clojure.test]))
|
[monger.core :as mg]
|
||||||
|
[monger.collection :as mc]
|
||||||
|
[clojure.test :refer :all]))
|
||||||
|
|
||||||
(helper/connect!)
|
;;
|
||||||
|
;; Connection via URI
|
||||||
|
;;
|
||||||
|
|
||||||
|
(when-not (System/getenv "CI")
|
||||||
|
(deftest ^{:authentication true} connect-to-mongo-via-uri-without-credentials
|
||||||
|
(let [{:keys [conn db]} (mg/connect-via-uri "mongodb://127.0.0.1/monger-test4")]
|
||||||
|
(is (-> conn .getAddress (.sameHost "127.0.0.1")))))
|
||||||
|
|
||||||
|
(deftest ^{:authentication true} connect-to-mongo-via-uri-with-valid-credentials
|
||||||
|
(let [{:keys [conn db]} (mg/connect-via-uri "mongodb://clojurewerkz%2Fmonger:monger@127.0.0.1/monger-test4")]
|
||||||
|
(is (= "monger-test4" (.getName db)))
|
||||||
|
(is (-> conn .getAddress (.sameHost "127.0.0.1")))
|
||||||
|
(mc/remove db "documents")
|
||||||
|
;; make sure that the database is selected
|
||||||
|
;; and operations get through.
|
||||||
|
(mc/insert db "documents" {:field "value"})
|
||||||
|
(is (= 1 (mc/count db "documents" {}))))))
|
||||||
|
|
||||||
|
(if-let [uri (System/getenv "MONGOHQ_URL")]
|
||||||
|
(deftest ^{:external true :authentication true} connect-to-mongo-via-uri-with-valid-credentials
|
||||||
|
(let [{:keys [conn db]} (mg/connect-via-uri uri)]
|
||||||
|
(is (-> conn .getAddress (.sameHost "127.0.0.1"))))))
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; Regular connecton
|
||||||
|
;;
|
||||||
|
|
||||||
(deftest test-authentication-with-valid-credentials
|
(deftest ^{:authentication true} test-authentication-with-valid-credentials
|
||||||
;; see ./bin/ci/before_script.sh. MK.
|
;; see ./bin/ci/before_script.sh. MK.
|
||||||
(let [username "clojurewerkz/monger"
|
(doseq [s ["monger-test" "monger-test2" "monger-test3" "monger-test4"]]
|
||||||
pwd "monger"]
|
(let [creds (mcr/create "clojurewerkz/monger" "monger-test" "monger")
|
||||||
(is (monger.core/authenticate "monger-test" username (.toCharArray pwd)))))
|
conn (mg/connect-with-credentials "127.0.0.1" creds)]
|
||||||
|
(mc/remove (mg/get-db conn "monger-test") "documents"))))
|
||||||
(deftest test-authentication-with-invalid-credentials
|
|
||||||
(let [username "monger"
|
|
||||||
^String pwd (monger.util/random-str 128 32)]
|
|
||||||
(is (not (monger.core/authenticate "monger-test2" username (.toCharArray pwd))))))
|
|
||||||
|
|
|
||||||
|
|
@ -1,123 +0,0 @@
|
||||||
(ns monger.test.cache-test
|
|
||||||
(:require [monger.test.helper :as helper]
|
|
||||||
[monger.collection :as mc])
|
|
||||||
(:use clojure.core.cache clojure.test monger.cache)
|
|
||||||
(:import [clojure.core.cache BasicCache FIFOCache LRUCache TTLCache]
|
|
||||||
java.util.UUID))
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; Playground/Tests. These were necessary because clojure.core.cache has
|
|
||||||
;; little documentation, incomplete test suite and
|
|
||||||
;; slightly non-standard (although necessary to support all those cache variations)
|
|
||||||
;; cache operations protocol.
|
|
||||||
;;
|
|
||||||
;; This is by no means clear or complete either but it did the job of helping me
|
|
||||||
;; explore the API.
|
|
||||||
|
|
||||||
(deftest ^{:cache true}
|
|
||||||
test-has?-with-basic-cache
|
|
||||||
(testing "that has? returns false for misses"
|
|
||||||
(let [c (BasicCache. {})]
|
|
||||||
(are [v] (is (false? (has? c v)))
|
|
||||||
:missing-key
|
|
||||||
"missing-key"
|
|
||||||
(gensym "missing-key"))))
|
|
||||||
(testing "that has? returns true for hits"
|
|
||||||
(let [c (BasicCache. {:skey "Value" :lkey (Long/valueOf 10000) "kkey" :keyword})]
|
|
||||||
(are [v] (is (has? c v))
|
|
||||||
:skey
|
|
||||||
:lkey
|
|
||||||
"kkey"))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest ^{:cache true}
|
|
||||||
test-lookup-with-basic-cache
|
|
||||||
(testing "that lookup returns nil for misses"
|
|
||||||
(let [c (BasicCache. {})]
|
|
||||||
(are [v] (is (nil? (lookup c v)))
|
|
||||||
:missing-key
|
|
||||||
"missing-key"
|
|
||||||
(gensym "missing-key"))))
|
|
||||||
(testing "that lookup returns cached values for hits"
|
|
||||||
(let [l (Long/valueOf 10000)
|
|
||||||
c (BasicCache. {:skey "Value" :lkey l "kkey" :keyword})]
|
|
||||||
(are [v k] (is (= v (lookup c k)))
|
|
||||||
"Value" :skey
|
|
||||||
l :lkey
|
|
||||||
:keyword "kkey"))))
|
|
||||||
|
|
||||||
(deftest ^{:cache true}
|
|
||||||
test-evict-with-basic-cache
|
|
||||||
(testing "that evict has no effect for keys that do not exist"
|
|
||||||
(let [c (atom (BasicCache. {:a 1 :b 2}))]
|
|
||||||
(swap! c evict :missing-key)
|
|
||||||
(is (has? @c :a))
|
|
||||||
(is (has? @c :b))))
|
|
||||||
(testing "that evict removes keys that did exist"
|
|
||||||
(let [c (atom (BasicCache. {:skey "Value" "kkey" :keyword}))]
|
|
||||||
(is (has? @c :skey))
|
|
||||||
(is (= "Value" (lookup @c :skey)))
|
|
||||||
(swap! c evict :skey)
|
|
||||||
(is (not (has? @c :skey)))
|
|
||||||
(is (= nil (lookup @c :skey)))
|
|
||||||
(is (has? @c "kkey"))
|
|
||||||
(is (= :keyword (lookup @c "kkey"))))))
|
|
||||||
|
|
||||||
(deftest ^{:cache true}
|
|
||||||
test-seed-with-basic-cache
|
|
||||||
(testing "that seed returns a new value"
|
|
||||||
(let [c (atom (BasicCache. {}))]
|
|
||||||
(swap! c seed {:a 1 :b "b" "c" :d})
|
|
||||||
(are [k v] (do
|
|
||||||
(is (has? @c k))
|
|
||||||
(is (= v (lookup @c k))))
|
|
||||||
:a 1
|
|
||||||
:b "b"
|
|
||||||
"c" :d))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; Tests
|
|
||||||
;;
|
|
||||||
|
|
||||||
(helper/connect!)
|
|
||||||
|
|
||||||
(use-fixtures :each (fn [f]
|
|
||||||
(mc/remove "basic_monger_cache_entries")
|
|
||||||
(f)
|
|
||||||
(mc/remove "basic_monger_cache_entries")))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest ^{:cache true}
|
|
||||||
test-has?-with-basic-monger-cache
|
|
||||||
(testing "that has? returns false for misses"
|
|
||||||
(let [coll "basic_monger_cache_entries"
|
|
||||||
c (basic-monger-cache-factory coll)]
|
|
||||||
(is (not (has? c (str (UUID/randomUUID)))))
|
|
||||||
(is (not (has? c (str (UUID/randomUUID)))))))
|
|
||||||
(testing "that has? returns true for hits"
|
|
||||||
(let [coll "basic_monger_cache_entries"
|
|
||||||
c (basic-monger-cache-factory coll {"a" 1 "b" "cache" "c" 3/4})]
|
|
||||||
(is (has? c "a"))
|
|
||||||
(is (has? c "b"))
|
|
||||||
(is (has? c "c"))
|
|
||||||
(is (not (has? c "d"))))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest ^{:cache true}
|
|
||||||
test-lookup-with-basic-moger-cache
|
|
||||||
(testing "that lookup returns nil for misses"
|
|
||||||
(let [coll "basic_monger_cache_entries"
|
|
||||||
c (basic-monger-cache-factory coll)]
|
|
||||||
(are [v] (is (nil? (lookup c v)))
|
|
||||||
:missing-key
|
|
||||||
"missing-key"
|
|
||||||
(gensym "missing-key"))))
|
|
||||||
(testing "that lookup returns cached values for hits"
|
|
||||||
(let [l (Long/valueOf 10000)
|
|
||||||
coll "basic_monger_cache_entries"
|
|
||||||
c (basic-monger-cache-factory coll {:skey "Value" :lkey l "kkey" :keyword})]
|
|
||||||
(are [v k] (is (= v (lookup c k)))
|
|
||||||
"Value" :skey
|
|
||||||
l :lkey
|
|
||||||
"keyword" "kkey"))))
|
|
||||||
|
|
@ -1,38 +1,25 @@
|
||||||
(set! *warn-on-reflection* true)
|
|
||||||
|
|
||||||
(ns monger.test.capped-collections-test
|
(ns monger.test.capped-collections-test
|
||||||
(:require [monger core util]
|
(:require [monger.core :as mg]
|
||||||
[monger.collection :as mc]
|
[monger.collection :as mc]
|
||||||
[monger.result :as mres]
|
[clojure.test :refer :all]
|
||||||
[monger.test.helper :as helper])
|
[monger.operators :refer :all]))
|
||||||
(:use clojure.test
|
|
||||||
monger.operators
|
|
||||||
monger.test.fixtures))
|
|
||||||
|
|
||||||
|
|
||||||
(helper/connect!)
|
|
||||||
|
|
||||||
(use-fixtures :each purge-cached)
|
|
||||||
|
|
||||||
(defn- megabytes
|
(defn- megabytes
|
||||||
[^long n]
|
[^long n]
|
||||||
(* n 1024 1024))
|
(* n 1024 1024))
|
||||||
|
|
||||||
|
(let [conn (mg/connect)
|
||||||
;;
|
db (mg/get-db conn "monger-test")]
|
||||||
;; Tests
|
(deftest test-inserting-into-capped-collection
|
||||||
;;
|
(let [n 1000
|
||||||
|
cname "cached"
|
||||||
(deftest test-inserting-into-capped-collection
|
_ (mc/drop db cname)
|
||||||
(let [n 1000
|
coll (mc/create db cname {:capped true :size (-> 16 megabytes) :max n})]
|
||||||
cname "cached"
|
(is (= cname (.getName coll)))
|
||||||
_ (mc/drop cname)
|
(mc/insert-batch db cname (for [i (range 0 (+ n 100))] {:i i}))
|
||||||
coll (mc/create cname {:capped true :size (-> 16 megabytes) :max n})]
|
(is (= n (mc/count db cname)))
|
||||||
(is (= cname (.getName coll)))
|
;; older elements get replaced by newer ones
|
||||||
(mc/insert-batch cname (for [i (range 0 (+ n 100))] {:i i}))
|
(is (not (mc/any? db cname {:i 1})))
|
||||||
(is (= n (mc/count cname)))
|
(is (not (mc/any? db cname {:i 5})))
|
||||||
;; older elements get replaced by newer ones
|
(is (not (mc/any? db cname {:i 9})))
|
||||||
(is (not (mc/any? cname {:i 1})))
|
(is (mc/any? db cname {:i (+ n 80)})))))
|
||||||
(is (not (mc/any? cname {:i 5})))
|
|
||||||
(is (not (mc/any? cname {:i 9})))
|
|
||||||
(is (mc/any? cname {:i (+ n 80)}))))
|
|
||||||
|
|
|
||||||
|
|
@ -1,153 +1,193 @@
|
||||||
(set! *warn-on-reflection* true)
|
|
||||||
|
|
||||||
(ns monger.test.collection-test
|
(ns monger.test.collection-test
|
||||||
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject CommandResult$CommandFailure MapReduceOutput MapReduceCommand MapReduceCommand$OutputType]
|
(:import org.bson.types.ObjectId
|
||||||
org.bson.types.ObjectId
|
|
||||||
java.util.Date)
|
java.util.Date)
|
||||||
(:require [monger.core :as mg]
|
(:require [monger.core :as mg]
|
||||||
[monger.collection :as mc]
|
[monger.collection :as mc]
|
||||||
[monger.result :as mgres]
|
[clojure.test :refer :all]
|
||||||
[monger.test.helper :as helper])
|
[monger.operators :refer :all]))
|
||||||
(:use clojure.test
|
|
||||||
monger.operators
|
|
||||||
monger.test.fixtures))
|
|
||||||
|
|
||||||
(helper/connect!)
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")]
|
||||||
|
|
||||||
(use-fixtures :each purge-people purge-docs purge-things purge-libraries)
|
(defn purge-collections
|
||||||
|
[f]
|
||||||
|
(mc/remove db "people")
|
||||||
|
(mc/remove db "docs")
|
||||||
|
(mc/remove db "things")
|
||||||
|
(mc/remove db "libraries")
|
||||||
|
(f)
|
||||||
|
(mc/remove db "people")
|
||||||
|
(mc/remove db "docs")
|
||||||
|
(mc/remove db "things")
|
||||||
|
(mc/remove db "libraries"))
|
||||||
|
|
||||||
|
(use-fixtures :each purge-collections)
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; count, remove
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest get-collection-size
|
||||||
|
(let [collection "things"]
|
||||||
|
(is (= 0 (mc/count db collection)))
|
||||||
|
(mc/insert-batch db collection [{:language "Clojure" :name "langohr"}
|
||||||
|
{:language "Clojure" :name "monger"}
|
||||||
|
{:language "Clojure" :name "incanter"}
|
||||||
|
{:language "Scala" :name "akka"}])
|
||||||
|
(is (= 4 (mc/count db collection)))
|
||||||
|
(is (mc/any? db collection))
|
||||||
|
(is (= 3 (mc/count db collection {:language "Clojure"})))
|
||||||
|
(is (mc/any? db collection {:language "Clojure"}))
|
||||||
|
(is (= 1 (mc/count db collection {:language "Scala" })))
|
||||||
|
(is (mc/any? db collection {:language "Scala"}))
|
||||||
|
(is (= 0 (mc/count db collection {:language "Python" })))
|
||||||
|
(is (not (mc/any? db collection {:language "Python"})))))
|
||||||
|
|
||||||
|
|
||||||
;;
|
(deftest remove-all-documents-from-collection
|
||||||
;; count, remove
|
(let [collection "libraries"]
|
||||||
;;
|
(mc/insert-batch db collection [{:language "Clojure" :name "monger"}
|
||||||
|
{:language "Clojure" :name "langohr"}
|
||||||
(deftest get-collection-size
|
{:language "Clojure" :name "incanter"}
|
||||||
(let [collection "things"]
|
{:language "Scala" :name "akka"}])
|
||||||
(is (= 0 (mc/count collection)))
|
(is (= 4 (mc/count db collection)))
|
||||||
(mc/insert-batch collection [{:language "Clojure" :name "langohr"}
|
(mc/remove db collection)
|
||||||
{:language "Clojure" :name "monger"}
|
(is (= 0 (mc/count db collection)))))
|
||||||
{:language "Clojure" :name "incanter"}
|
|
||||||
{:language "Scala" :name "akka"}])
|
|
||||||
(is (= 4 (mc/count collection)))
|
|
||||||
(is (mc/any? collection))
|
|
||||||
(is (= 3 (mc/count mg/*mongodb-database* collection {:language "Clojure"})))
|
|
||||||
(is (mc/any? mg/*mongodb-database* collection {:language "Clojure"}))
|
|
||||||
(is (= 1 (mc/count collection {:language "Scala" })))
|
|
||||||
(is (mc/any? collection {:language "Scala"}))
|
|
||||||
(is (= 0 (mc/count mg/*mongodb-database* collection {:language "Python" })))
|
|
||||||
(is (not (mc/any? mg/*mongodb-database* collection {:language "Python"})))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest remove-all-documents-from-collection
|
(deftest remove-some-documents-from-collection
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(mc/insert-batch collection [{:language "Clojure" :name "monger"}
|
(mc/insert-batch db collection [{:language "Clojure" :name "monger"}
|
||||||
{:language "Clojure" :name "langohr"}
|
{:language "Clojure" :name "langohr"}
|
||||||
{:language "Clojure" :name "incanter"}
|
{:language "Clojure" :name "incanter"}
|
||||||
{:language "Scala" :name "akka"}])
|
{:language "Scala" :name "akka"}])
|
||||||
(is (= 4 (mc/count collection)))
|
(is (= 4 (mc/count db collection)))
|
||||||
(mc/remove collection)
|
(mc/remove db collection {:language "Clojure"})
|
||||||
(is (= 0 (mc/count collection)))))
|
(is (= 1 (mc/count db collection)))))
|
||||||
|
|
||||||
|
(deftest remove-a-single-document-from-collection
|
||||||
|
(let [collection "libraries"
|
||||||
|
oid (ObjectId.)]
|
||||||
|
(mc/insert-batch db collection [{:language "Clojure" :name "monger" :_id oid}])
|
||||||
|
(mc/remove-by-id db collection oid)
|
||||||
|
(is (= 0 (mc/count db collection)))
|
||||||
|
(is (nil? (mc/find-by-id db collection oid)))))
|
||||||
|
|
||||||
|
|
||||||
(deftest remove-some-documents-from-collection
|
;;
|
||||||
(let [collection "libraries"]
|
;; exists?, drop, create
|
||||||
(mc/insert-batch collection [{:language "Clojure" :name "monger"}
|
;;
|
||||||
{:language "Clojure" :name "langohr"}
|
|
||||||
{:language "Clojure" :name "incanter"}
|
|
||||||
{:language "Scala" :name "akka"}])
|
|
||||||
(is (= 4 (mc/count collection)))
|
|
||||||
(mc/remove collection {:language "Clojure"})
|
|
||||||
(is (= 1 (mc/count collection)))))
|
|
||||||
|
|
||||||
(deftest remove-a-single-document-from-collection
|
(deftest checking-for-collection-existence-when-it-does-not-exist
|
||||||
(let [collection "libraries"
|
(let [collection "widgets"]
|
||||||
oid (ObjectId.)]
|
(mc/drop db collection)
|
||||||
(mc/insert-batch collection [{:language "Clojure" :name "monger" :_id oid}])
|
(is (false? (mc/exists? db collection)))))
|
||||||
(mc/remove-by-id collection oid)
|
|
||||||
(is (= 0 (mc/count collection)))
|
(deftest checking-for-collection-existence-when-it-does-exist
|
||||||
(is (nil? (mc/find-by-id collection oid)))))
|
(let [collection "widgets"]
|
||||||
|
(mc/drop db collection)
|
||||||
|
(mc/insert-batch db collection [{:name "widget1"}
|
||||||
|
{:name "widget2"}])
|
||||||
|
(is (mc/exists? db collection))
|
||||||
|
(mc/drop db collection)
|
||||||
|
(is (false? (mc/exists? db collection)))
|
||||||
|
(mc/create db "widgets" {:capped true :size 100000 :max 10})
|
||||||
|
(is (mc/exists? db collection))
|
||||||
|
(mc/rename db collection "gadgets")
|
||||||
|
(is (not (mc/exists? db collection)))
|
||||||
|
(is (mc/exists? db "gadgets"))
|
||||||
|
(mc/drop db "gadgets")))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; any?, empty?
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest test-any-on-empty-collection
|
||||||
|
(let [collection "things"]
|
||||||
|
(is (not (mc/any? db collection)))))
|
||||||
|
|
||||||
|
(deftest test-any-on-non-empty-collection
|
||||||
|
(let [collection "things"
|
||||||
|
_ (mc/insert db collection {:language "Clojure" :name "langohr"})]
|
||||||
|
(is (mc/any? db "things" {:language "Clojure"}))))
|
||||||
|
|
||||||
|
(deftest test-empty-on-empty-collection
|
||||||
|
(let [collection "things"]
|
||||||
|
(is (mc/empty? db collection))))
|
||||||
|
|
||||||
|
(deftest test-empty-on-non-empty-collection
|
||||||
|
(let [collection "things"
|
||||||
|
_ (mc/insert db collection {:language "Clojure" :name "langohr"})]
|
||||||
|
(is (not (mc/empty? db "things")))))
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; exists?, drop, create
|
;; distinct
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(deftest checking-for-collection-existence-when-it-does-not-exist
|
(deftest test-distinct-values
|
||||||
(let [collection "widgets"]
|
(let [collection "widgets"
|
||||||
(mc/drop collection)
|
batch [{:state "CA" :quantity 1 :price 199.00}
|
||||||
(is (false? (mc/exists? collection)))))
|
{:state "NY" :quantity 2 :price 199.00}
|
||||||
|
{:state "NY" :quantity 1 :price 299.00}
|
||||||
|
{:state "IL" :quantity 2 :price 11.50 }
|
||||||
|
{:state "CA" :quantity 2 :price 2.95 }
|
||||||
|
{:state "IL" :quantity 3 :price 5.50 }]]
|
||||||
|
(mc/insert-batch db collection batch)
|
||||||
|
(is (= ["CA" "IL" "NY"] (sort (mc/distinct db collection :state))))
|
||||||
|
(is (= ["CA" "IL" "NY"] (sort (mc/distinct db collection :state {}))))
|
||||||
|
(is (= ["CA" "NY"] (sort (mc/distinct db collection :state {:price {$gt 100.00}}))))))
|
||||||
|
|
||||||
(deftest checking-for-collection-existence-when-it-does-exist
|
;;
|
||||||
(let [collection "widgets"]
|
;; update
|
||||||
(mc/drop collection)
|
;;
|
||||||
(mc/insert-batch collection [{:name "widget1"}
|
|
||||||
{:name "widget2"}])
|
|
||||||
(is (mc/exists? collection))
|
|
||||||
(mc/drop collection)
|
|
||||||
(is (false? (mc/exists? collection)))
|
|
||||||
(mc/create "widgets" {:capped true :size 100000 :max 10})
|
|
||||||
(is (mc/exists? collection))
|
|
||||||
(mc/rename collection "gadgets")
|
|
||||||
(is (not (mc/exists? collection)))
|
|
||||||
(is (mc/exists? "gadgets"))
|
|
||||||
(mc/drop "gadgets")))
|
|
||||||
|
|
||||||
;;
|
(let [coll "things"
|
||||||
;; any?, empty?
|
batch [{:_id 1 :type "rock" :size "small"}
|
||||||
;;
|
{:_id 2 :type "bed" :size "bed-sized"}
|
||||||
|
{:_id 3 :type "bottle" :size "1.5 liters"}]]
|
||||||
|
|
||||||
(deftest test-any-on-empty-collection
|
(deftest test-update
|
||||||
(let [collection "things"]
|
(mc/insert-batch db coll batch)
|
||||||
(is (not (mc/any? collection)))))
|
(is (= "small" (:size (mc/find-one-as-map db coll {:type "rock"}))))
|
||||||
|
(mc/update db coll {:type "rock"} {"$set" {:size "huge"}})
|
||||||
|
(is (= "huge" (:size (mc/find-one-as-map db coll {:type "rock"})))))
|
||||||
|
|
||||||
(deftest test-any-on-non-empty-collection
|
(deftest test-upsert
|
||||||
(let [collection "things"
|
(is (mc/empty? db coll))
|
||||||
_ (mc/insert collection {:language "Clojure" :name "langohr"})]
|
(mc/upsert db coll {:_id 4} {"$set" {:size "tiny"}})
|
||||||
(is (mc/any? "things"))
|
(is (not (mc/empty? db coll)))
|
||||||
(is (mc/any? mg/*mongodb-database* "things" {:language "Clojure"}))))
|
(mc/upsert db coll {:_id 4} {"$set" {:size "big"}})
|
||||||
|
(is (= [{:_id 4 :size "big"}] (mc/find-maps db coll {:_id 4}))))
|
||||||
|
|
||||||
(deftest test-empty-on-empty-collection
|
(deftest test-update-by-id
|
||||||
(let [collection "things"]
|
(mc/insert-batch db coll batch)
|
||||||
(is (mc/empty? collection))
|
(is (= "bed" (:type (mc/find-one-as-map db coll {:_id 2}))))
|
||||||
(is (mc/empty? mg/*mongodb-database* collection))))
|
(mc/update-by-id db coll 2 {"$set" {:type "living room"}})
|
||||||
|
(is (= "living room" (:type (mc/find-one-as-map db coll {:_id 2})))))
|
||||||
|
|
||||||
(deftest test-empty-on-non-empty-collection
|
(deftest test-update-by-ids
|
||||||
(let [collection "things"
|
(mc/insert-batch db coll batch)
|
||||||
_ (mc/insert collection {:language "Clojure" :name "langohr"})]
|
(is (= "bed" (:type (mc/find-one-as-map db coll {:_id 2}))))
|
||||||
(is (not (mc/empty? "things")))))
|
(is (= "bottle" (:type (mc/find-one-as-map db coll {:_id 3}))))
|
||||||
|
(mc/update-by-ids db coll [2 3] {"$set" {:type "dog"}})
|
||||||
|
(is (= "dog" (:type (mc/find-one-as-map db coll {:_id 2}))))
|
||||||
|
(is (= "dog" (:type (mc/find-one-as-map db coll {:_id 3}))))))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; miscellenous
|
||||||
|
;;
|
||||||
|
|
||||||
;;
|
(deftest test-system-collection-predicate
|
||||||
;; distinct
|
(are [name] (is (mc/system-collection? name))
|
||||||
;;
|
"system.indexes"
|
||||||
|
"system"
|
||||||
(deftest test-distinct-values
|
;; we treat default GridFS collections as system ones,
|
||||||
(let [collection "widgets"
|
;; possibly this is a bad idea, time will tell. MK.
|
||||||
batch [{:state "CA" :quantity 1 :price 199.00}
|
"fs.chunks"
|
||||||
{:state "NY" :quantity 2 :price 199.00}
|
"fs.files")
|
||||||
{:state "NY" :quantity 1 :price 299.00}
|
(are [name] (is (not (mc/system-collection? name)))
|
||||||
{:state "IL" :quantity 2 :price 11.50 }
|
"events"
|
||||||
{:state "CA" :quantity 2 :price 2.95 }
|
"accounts"
|
||||||
{:state "IL" :quantity 3 :price 5.50 }]]
|
"megacorp_account"
|
||||||
(mc/insert-batch collection batch)
|
"myapp_development")))
|
||||||
(is (= ["CA" "IL" "NY"] (sort (mc/distinct mg/*mongodb-database* collection :state {}))))
|
|
||||||
(is (= ["CA" "NY"] (sort (mc/distinct collection :state {:price {$gt 100.00}}))))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; miscellenous
|
|
||||||
;;
|
|
||||||
|
|
||||||
(deftest test-system-collection-predicate
|
|
||||||
(are [name] (is (mc/system-collection? name))
|
|
||||||
"system.indexes"
|
|
||||||
"system"
|
|
||||||
;; we treat default GridFS collections as system ones,
|
|
||||||
;; possibly this is a bad idea, time will tell. MK.
|
|
||||||
"fs.chunks"
|
|
||||||
"fs.files")
|
|
||||||
(are [name] (is (not (mc/system-collection? name)))
|
|
||||||
"events"
|
|
||||||
"accounts"
|
|
||||||
"megacorp_account"
|
|
||||||
"myapp_development"))
|
|
||||||
|
|
|
||||||
|
|
@ -1,50 +1,29 @@
|
||||||
(ns monger.test.command-test
|
(ns monger.test.command-test
|
||||||
(:require [monger.core :as mg]
|
(:require [monger.core :as mg]
|
||||||
[monger.command :as mcom]
|
[monger.command :as mcom]
|
||||||
[monger.test.helper :as helper]
|
[monger.collection :as mc]
|
||||||
[monger.collection :as mc])
|
[clojure.test :refer :all]
|
||||||
(:use clojure.test
|
[monger.result :refer [acknowledged?]]
|
||||||
[monger.result :only [ok?]]
|
[monger.conversion :refer [from-db-object]]))
|
||||||
[monger.conversion :only [from-db-object]]))
|
|
||||||
|
|
||||||
(helper/connect!)
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")]
|
||||||
|
(deftest ^{:command true} test-reindex-collection
|
||||||
|
(let [_ (mc/insert db "test" {:name "Clojure"})
|
||||||
|
result (mcom/reindex-collection db "test")]
|
||||||
|
(is (acknowledged? result))))
|
||||||
|
|
||||||
|
(deftest ^{:command true} test-server-status
|
||||||
|
(let [status (mcom/server-status db)]
|
||||||
|
(is (acknowledged? status))
|
||||||
|
(is (not-empty status))))
|
||||||
|
|
||||||
(deftest ^{:command true} test-db-stats
|
(deftest ^{:command true} test-top
|
||||||
(let [stats (mcom/db-stats)]
|
(let [result (mcom/top conn)]
|
||||||
(is (ok? stats))
|
(is (acknowledged? result))
|
||||||
(is (= "monger-test" (get stats "db")))))
|
(is (not-empty result))))
|
||||||
|
|
||||||
(deftest ^{:command true} test-collection-stats
|
(deftest ^{:command true} test-running-is-master-as-an-arbitrary-command
|
||||||
(let [collection "stat_test"
|
(let [raw (mg/command db {:isMaster 1})
|
||||||
_ (mc/insert collection {:name "Clojure"})
|
result (from-db-object raw true)]
|
||||||
check (mc/count collection)
|
(is (acknowledged? raw)))))
|
||||||
stats (mcom/collection-stats collection)]
|
|
||||||
(is (ok? stats))
|
|
||||||
(is (= "monger-test.stat_test" (get stats "ns")))
|
|
||||||
(is (= check (get stats "count")))))
|
|
||||||
|
|
||||||
(deftest ^{:command true} test-reindex-collection
|
|
||||||
(let [_ (mc/insert "test" {:name "Clojure"})
|
|
||||||
result (mcom/reindex-collection "test")]
|
|
||||||
(is (ok? result))
|
|
||||||
(is (get result "indexes"))))
|
|
||||||
|
|
||||||
(deftest ^{:command true} test-server-status
|
|
||||||
(let [status (mcom/server-status)]
|
|
||||||
(is (ok? status))
|
|
||||||
(is (not-empty status))
|
|
||||||
(is (get status "serverUsed"))))
|
|
||||||
|
|
||||||
(deftest ^{:command true} test-top
|
|
||||||
(let [result (mcom/top)]
|
|
||||||
(is (ok? result))
|
|
||||||
(is (not-empty result))
|
|
||||||
(is (get result "serverUsed"))))
|
|
||||||
|
|
||||||
(deftest ^{:command true} test-running-is-master-as-an-arbitrary-command
|
|
||||||
(let [raw (mg/command {:isMaster 1})
|
|
||||||
result (from-db-object raw true)]
|
|
||||||
(is (ok? result))
|
|
||||||
(is (ok? raw))
|
|
||||||
(is (:ismaster result))))
|
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,11 @@
|
||||||
(ns monger.test.conversion-test
|
(ns monger.test.conversion-test
|
||||||
(:require [monger core collection])
|
(:require [monger core collection]
|
||||||
|
[clojure.test :refer :all]
|
||||||
|
[monger.conversion :refer :all])
|
||||||
(:import [com.mongodb DBObject BasicDBObject BasicDBList]
|
(:import [com.mongodb DBObject BasicDBObject BasicDBList]
|
||||||
[java.util Date Calendar List ArrayList]
|
[java.util Date Calendar List ArrayList]
|
||||||
org.bson.types.ObjectId)
|
org.bson.types.ObjectId
|
||||||
(:use clojure.test monger.conversion))
|
(org.bson.types Decimal128)))
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
|
|
@ -100,6 +102,13 @@
|
||||||
(is (= 2 (from-db-object 2 false)))
|
(is (= 2 (from-db-object 2 false)))
|
||||||
(is (= 2 (from-db-object 2 true))))
|
(is (= 2 (from-db-object 2 true))))
|
||||||
|
|
||||||
|
(deftest convert-decimal-from-dbobject
|
||||||
|
(is (= 2.3M (from-db-object (Decimal128. 2.3M) false)))
|
||||||
|
(is (= 2.3M (from-db-object (Decimal128. 2.3M) true)))
|
||||||
|
(is (= 2.3M (from-db-object (Decimal128/parse "2.3") true)))
|
||||||
|
(is (not= 2.32M (from-db-object (Decimal128/parse "2.3") true)))
|
||||||
|
)
|
||||||
|
|
||||||
(deftest convert-float-from-dbobject
|
(deftest convert-float-from-dbobject
|
||||||
(is (= 3.3 (from-db-object 3.3 false)))
|
(is (= 3.3 (from-db-object 3.3 false)))
|
||||||
(is (= 3.3 (from-db-object 3.3 true))))
|
(is (= 3.3 (from-db-object 3.3 true))))
|
||||||
|
|
@ -111,20 +120,20 @@
|
||||||
(.put "name" name)
|
(.put "name" name)
|
||||||
(.put "age" age))
|
(.put "age" age))
|
||||||
output (from-db-object input false)]
|
output (from-db-object input false)]
|
||||||
(is (= (output { "name" name, "age" age })))
|
(is (= output { "name" name, "age" age }))
|
||||||
(is (= (output "name") name))
|
(is (= (output "name") name))
|
||||||
(is (nil? (output :name)))
|
(is (nil? (output :name)))
|
||||||
(is (= (output "age") age))
|
(is (= (output "age") age))
|
||||||
(is (nil? (output "points")))))
|
(is (nil? (output "points")))))
|
||||||
|
|
||||||
(deftest convert-flat-db-object-to-map-without-keywordizing
|
(deftest convert-flat-db-object-to-map-with-keywordizing
|
||||||
(let [name "Michael"
|
(let [name "Michael"
|
||||||
age 26
|
age 26
|
||||||
input (doto (BasicDBObject.)
|
input (doto (BasicDBObject.)
|
||||||
(.put "name" name)
|
(.put "name" name)
|
||||||
(.put "age" age))
|
(.put "age" age))
|
||||||
output (from-db-object input true)]
|
output (from-db-object input true)]
|
||||||
(is (= (output { :name name, :age age })))
|
(is (= output { :name name, :age age }))
|
||||||
(is (= (output :name) name))
|
(is (= (output :name) name))
|
||||||
(is (nil? (output "name")))
|
(is (nil? (output "name")))
|
||||||
(is (= (output :age) age))
|
(is (= (output :age) age))
|
||||||
|
|
|
||||||
|
|
@ -1,79 +1,28 @@
|
||||||
(ns monger.test.core-test
|
(ns monger.test.core-test
|
||||||
(:require [monger core collection util result]
|
(:require [monger util result]
|
||||||
[monger.test.helper :as helper]
|
[monger.core :as mg :refer [server-address mongo-options]]
|
||||||
[monger.collection :as mc])
|
[monger.collection :as mc]
|
||||||
(:import [com.mongodb Mongo DB WriteConcern MongoOptions ServerAddress])
|
[clojure.test :refer :all])
|
||||||
(:use clojure.test
|
(:import [com.mongodb MongoClient DB WriteConcern MongoClientOptions ServerAddress]))
|
||||||
[monger.core :only [server-address mongo-options]]))
|
|
||||||
|
|
||||||
(println (str "Using Clojure version " *clojure-version*))
|
(println (str "Using Clojure version " *clojure-version*))
|
||||||
(helper/connect!)
|
|
||||||
|
|
||||||
(deftest connect-to-mongo-with-default-host-and-port
|
(deftest connect-to-mongo-with-default-host-and-port
|
||||||
(let [connection (monger.core/connect)]
|
(let [connection (mg/connect)]
|
||||||
(is (instance? com.mongodb.Mongo connection))))
|
(is (instance? com.mongodb.MongoClient connection))))
|
||||||
|
|
||||||
|
(deftest connect-and-disconnect
|
||||||
|
(let [conn (mg/connect)]
|
||||||
|
(mg/disconnect conn)))
|
||||||
|
|
||||||
(deftest connect-to-mongo-with-default-host-and-explicit-port
|
(deftest connect-to-mongo-with-default-host-and-explicit-port
|
||||||
(let [connection (monger.core/connect { :port 27017 })]
|
(let [connection (mg/connect {:port 27017})]
|
||||||
(is (instance? com.mongodb.Mongo connection))))
|
(is (instance? com.mongodb.MongoClient connection))))
|
||||||
|
|
||||||
|
|
||||||
(deftest connect-to-mongo-with-default-port-and-explicit-host
|
(deftest connect-to-mongo-with-default-port-and-explicit-host
|
||||||
(let [connection (monger.core/connect { :host "127.0.0.1" })]
|
(let [connection (mg/connect {:host "127.0.0.1"})]
|
||||||
(is (instance? com.mongodb.Mongo connection))))
|
(is (instance? com.mongodb.MongoClient connection))))
|
||||||
|
|
||||||
(when-not (System/getenv "CI")
|
|
||||||
(deftest connect-to-mongo-via-uri-without-credentials
|
|
||||||
(let [connection (monger.core/connect-via-uri! "mongodb://127.0.0.1/monger-test4")]
|
|
||||||
(is (= (-> connection .getAddress ^InetAddress (.sameHost "127.0.0.1")))))
|
|
||||||
;; reconnect using regular host
|
|
||||||
(helper/connect!))
|
|
||||||
|
|
||||||
(deftest connect-to-mongo-via-uri-with-valid-credentials
|
|
||||||
(let [connection (monger.core/connect-via-uri! "mongodb://clojurewerkz/monger!:monger!@127.0.0.1/monger-test4")]
|
|
||||||
(is (= "monger-test4" (.getName (monger.core/current-db))))
|
|
||||||
(is (= (-> connection .getAddress ^InetAddress (.sameHost "127.0.0.1"))))
|
|
||||||
(mc/remove "documents")
|
|
||||||
;; make sure that the database is selected
|
|
||||||
;; and operations get through.
|
|
||||||
(mc/insert "documents" {:field "value"})
|
|
||||||
(is (= 1 (mc/count "documents" {}))))
|
|
||||||
;; reconnect using regular host
|
|
||||||
(helper/connect!)))
|
|
||||||
|
|
||||||
(if-let [uri (System/getenv "MONGOHQ_URL")]
|
|
||||||
(deftest ^{:external true} connect-to-mongo-via-uri-with-valid-credentials
|
|
||||||
(let [connection (monger.core/connect-via-uri! uri)]
|
|
||||||
(is (= (-> connection .getAddress ^InetAddress (.sameHost "127.0.0.1")))))
|
|
||||||
;; reconnect using regular host
|
|
||||||
(helper/connect!)))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest connect-to-mongo-via-uri-with-invalid-credentials
|
|
||||||
(is (thrown? IllegalArgumentException
|
|
||||||
(monger.core/connect-via-uri! "mongodb://clojurewerkz/monger!:ahsidaysd78jahsdi8@127.0.0.1/monger-test4"))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-mongo-options-builder
|
|
||||||
(let [max-wait-time (* 1000 60 2)
|
|
||||||
^MongoOptions result (monger.core/mongo-options :connections-per-host 3 :threads-allowed-to-block-for-connection-multiplier 50
|
|
||||||
:max-wait-time max-wait-time :connect-timeout 10 :socket-timeout 10 :socket-keep-alive true
|
|
||||||
:auto-connect-retry true :max-auto-connect-retry-time 0 :safe true
|
|
||||||
:w 1 :w-timeout 20 :fsync true :j true)]
|
|
||||||
(is (= 3 (. result connectionsPerHost)))
|
|
||||||
(is (= 50 (. result threadsAllowedToBlockForConnectionMultiplier)))
|
|
||||||
(is (= max-wait-time (.maxWaitTime result)))
|
|
||||||
(is (= 10 (.connectTimeout result)))
|
|
||||||
(is (= 10 (.socketTimeout result)))
|
|
||||||
(is (.socketKeepAlive result))
|
|
||||||
(is (.autoConnectRetry result))
|
|
||||||
(is (= 0 (.maxAutoConnectRetryTime result)))
|
|
||||||
(is (.safe result))
|
|
||||||
(is (= 1 (.w result)))
|
|
||||||
(is (= 20 (.wtimeout result)))
|
|
||||||
(is (.fsync result))
|
|
||||||
(is (.j result))))
|
|
||||||
|
|
||||||
(deftest test-server-address
|
(deftest test-server-address
|
||||||
(let [host "127.0.0.1"
|
(let [host "127.0.0.1"
|
||||||
|
|
@ -83,32 +32,58 @@
|
||||||
(is (= port (.getPort sa)))))
|
(is (= port (.getPort sa)))))
|
||||||
|
|
||||||
(deftest use-existing-mongo-connection
|
(deftest use-existing-mongo-connection
|
||||||
(let [^MongoOptions opts (mongo-options :threads-allowed-to-block-for-connection-multiplier 300)
|
(let [^MongoClientOptions opts (mongo-options {:threads-allowed-to-block-for-connection-multiplier 300})
|
||||||
connection (Mongo. "127.0.0.1" opts)]
|
connection (MongoClient. "127.0.0.1" opts)
|
||||||
(monger.core/set-connection! connection)
|
db (mg/get-db connection "monger-test")]
|
||||||
(is (= monger.core/*mongodb-connection* connection))))
|
(mg/disconnect connection)))
|
||||||
|
|
||||||
(deftest connect-to-mongo-with-extra-options
|
(deftest connect-to-mongo-with-extra-options
|
||||||
(let [^MongoOptions opts (mongo-options :threads-allowed-to-block-for-connection-multiplier 300)
|
(let [^MongoClientOptions opts (mongo-options {:threads-allowed-to-block-for-connection-multiplier 300})
|
||||||
^ServerAddress sa (server-address "127.0.0.1" 27017)]
|
^ServerAddress sa (server-address "127.0.0.1" 27017)
|
||||||
(monger.core/connect! sa opts)))
|
conn (mg/connect sa opts)]
|
||||||
|
(mg/disconnect conn)))
|
||||||
|
|
||||||
|
|
||||||
(deftest get-database
|
(deftest get-database
|
||||||
(let [connection (monger.core/connect)
|
(let [connection (mg/connect)
|
||||||
db (monger.core/get-db connection "monger-test")]
|
db (mg/get-db connection "monger-test")]
|
||||||
(is (instance? com.mongodb.DB db))))
|
(is (instance? com.mongodb.DB db))))
|
||||||
|
|
||||||
|
|
||||||
(deftest test-get-db-names
|
(deftest test-get-db-names
|
||||||
(let [dbs (monger.core/get-db-names)]
|
(let [conn (mg/connect)
|
||||||
|
dbs (mg/get-db-names conn)]
|
||||||
(is (not (empty? dbs)))
|
(is (not (empty? dbs)))
|
||||||
(is (dbs "monger-test"))))
|
(is (dbs "monger-test"))))
|
||||||
|
|
||||||
(deftest get-last-error
|
(deftest monger-options-test
|
||||||
(let [connection (monger.core/connect)
|
(let [opts {:always-use-mbeans true
|
||||||
db (monger.core/get-db connection "monger-test")]
|
:application-name "app"
|
||||||
(is (monger.result/ok? (monger.core/get-last-error)))
|
:connect-timeout 1
|
||||||
(is (monger.result/ok? (monger.core/get-last-error db)))
|
:connections-per-host 1
|
||||||
(is (monger.result/ok? (monger.core/get-last-error db WriteConcern/NORMAL)))
|
:cursor-finalizer-enabled true
|
||||||
(is (monger.result/ok? (monger.core/get-last-error db 1 100 true)))))
|
:description "Description"
|
||||||
|
:heartbeat-connect-timeout 1
|
||||||
|
:heartbeat-frequency 1
|
||||||
|
:heartbeat-socket-timeout 1
|
||||||
|
:local-threshold 1
|
||||||
|
:max-connection-idle-time 1
|
||||||
|
:max-connection-life-time 1
|
||||||
|
:max-wait-time 1
|
||||||
|
:min-connections-per-host 1
|
||||||
|
:min-heartbeat-frequency 1
|
||||||
|
:required-replica-set-name "rs"
|
||||||
|
:retry-writes true
|
||||||
|
:server-selection-timeout 1
|
||||||
|
:socket-keep-alive true
|
||||||
|
:socket-timeout 1
|
||||||
|
:ssl-enabled true
|
||||||
|
:ssl-invalid-host-name-allowed true
|
||||||
|
:threads-allowed-to-block-for-connection-multiplier 1
|
||||||
|
:uuid-representation org.bson.UuidRepresentation/STANDARD
|
||||||
|
:write-concern com.mongodb.WriteConcern/JOURNAL_SAFE}]
|
||||||
|
(is (instance? com.mongodb.MongoClientOptions$Builder (mg/mongo-options-builder opts)))))
|
||||||
|
|
||||||
|
(deftest connect-to-uri-without-db-name
|
||||||
|
(let [uri "mongodb://localhost:27017"]
|
||||||
|
(is (thrown? IllegalArgumentException (mg/connect-via-uri uri)))))
|
||||||
|
|
|
||||||
107
test/monger/test/cursor_test.clj
Normal file
107
test/monger/test/cursor_test.clj
Normal file
|
|
@ -0,0 +1,107 @@
|
||||||
|
(ns monger.test.cursor-test
|
||||||
|
(:import [com.mongodb DBCursor DBObject Bytes]
|
||||||
|
[java.util List Map])
|
||||||
|
(:require [monger.core :as mg]
|
||||||
|
[clojure.test :refer :all]
|
||||||
|
[monger.cursor :refer :all]))
|
||||||
|
|
||||||
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")]
|
||||||
|
(deftest make-db-cursor-for-collection
|
||||||
|
(is (= DBCursor
|
||||||
|
(class (make-db-cursor db :docs)))))
|
||||||
|
|
||||||
|
(deftest getting-cursor-options-value
|
||||||
|
(let [db-cur (make-db-cursor db :docs)
|
||||||
|
opts (get-options db-cur)]
|
||||||
|
(is (= true (isa? (class opts) Map)))
|
||||||
|
(is (= 0 (.getOptions db-cur))) ;;test default value
|
||||||
|
(is (= false (:notimeout opts)))
|
||||||
|
(is (= false (:partial opts)))
|
||||||
|
(is (= false (:awaitdata opts)))
|
||||||
|
(is (= false (:oplogreplay opts)))
|
||||||
|
(is (= false (:slaveok opts)))
|
||||||
|
(is (= false (:tailable opts)))))
|
||||||
|
|
||||||
|
(deftest adding-option-to-cursor
|
||||||
|
(let [db-cur (make-db-cursor db :docs)]
|
||||||
|
(add-option! db-cur :notimeout)
|
||||||
|
(is (= (:notimeout cursor-options)
|
||||||
|
(.getOptions db-cur)))
|
||||||
|
(add-option! db-cur :tailable)
|
||||||
|
(is (= (.getOptions db-cur)
|
||||||
|
(bit-or (:notimeout cursor-options)
|
||||||
|
(:tailable cursor-options))))))
|
||||||
|
|
||||||
|
(deftest remove-option-from-cursor
|
||||||
|
(let [db-cur (make-db-cursor db :docs)]
|
||||||
|
(add-option! db-cur :partial)
|
||||||
|
(add-option! db-cur :awaitdata)
|
||||||
|
;; removing not-set option should not affect result
|
||||||
|
(remove-option! db-cur :notimeout)
|
||||||
|
(is (= (.getOptions db-cur)
|
||||||
|
(bit-or (:partial cursor-options)
|
||||||
|
(:awaitdata cursor-options))))
|
||||||
|
;; removing active option should remove correct value
|
||||||
|
(remove-option! db-cur :awaitdata)
|
||||||
|
(is (= (.getOptions db-cur)
|
||||||
|
(:partial cursor-options)))))
|
||||||
|
|
||||||
|
|
||||||
|
(deftest test-reset-options
|
||||||
|
(let [db-cur (make-db-cursor db :docs)]
|
||||||
|
(add-option! db-cur :partial)
|
||||||
|
(is (= (.getOptions db-cur)
|
||||||
|
(:partial cursor-options)))
|
||||||
|
(is (= 0
|
||||||
|
(int (.getOptions (reset-options db-cur)))))))
|
||||||
|
|
||||||
|
(deftest add-options-with-hashmap
|
||||||
|
(let [db-cur (make-db-cursor db :docs)
|
||||||
|
_ (add-options db-cur {:notimeout true :slaveok true})
|
||||||
|
opts (get-options db-cur)]
|
||||||
|
(is (= true (:notimeout opts)))
|
||||||
|
(is (= true (:slaveok opts)))
|
||||||
|
(is (= false (:tailable opts)))
|
||||||
|
(is (= false (:oplogreplay opts)))))
|
||||||
|
|
||||||
|
(deftest add-options-with-hashmap-and-remove-option
|
||||||
|
(let [db-cur (make-db-cursor db :docs)
|
||||||
|
_ (add-options db-cur {:notimeout true :slaveok true})
|
||||||
|
opts (get-options db-cur)]
|
||||||
|
(is (= true (:notimeout opts)))
|
||||||
|
(is (= true (:slaveok opts)))
|
||||||
|
;;remove key and add another option
|
||||||
|
(add-options db-cur {:partial true :slaveok false})
|
||||||
|
(let [opts (get-options db-cur)]
|
||||||
|
(is (= true (:notimeout opts)))
|
||||||
|
(is (= true (:partial opts)))
|
||||||
|
(is (= false (:slaveok opts)))
|
||||||
|
(is (= false (:tailable opts))))))
|
||||||
|
|
||||||
|
(deftest add-options-with-list
|
||||||
|
(let [db-cur (make-db-cursor db :docs)
|
||||||
|
_ (add-options db-cur [:notimeout :slaveok])
|
||||||
|
opts (get-options db-cur)]
|
||||||
|
(is (= true (:notimeout opts)))
|
||||||
|
(is (= true (:slaveok opts)))
|
||||||
|
(is (= false (:tailable opts)))
|
||||||
|
(is (= false (:oplogreplay opts)))))
|
||||||
|
|
||||||
|
(deftest add-options-with-Bytes
|
||||||
|
(let [db-cur (make-db-cursor db :docs)
|
||||||
|
_ (add-options db-cur Bytes/QUERYOPTION_NOTIMEOUT)
|
||||||
|
opts (get-options db-cur)]
|
||||||
|
(is (= true (:notimeout opts)))
|
||||||
|
(is (= false (:slaveok opts)))
|
||||||
|
(is (= false (:tailable opts)))
|
||||||
|
(is (= false (:oplogreplay opts)))))
|
||||||
|
|
||||||
|
(deftest add-options-with-one-keyword
|
||||||
|
(let [db-cur (make-db-cursor db :docs)
|
||||||
|
_ (add-options db-cur :notimeout)
|
||||||
|
opts (get-options db-cur)]
|
||||||
|
(is (= true (:notimeout opts)))
|
||||||
|
(is (= false (:slaveok opts)))
|
||||||
|
(is (= false (:tailable opts)))
|
||||||
|
(is (= false (:oplogreplay opts))))))
|
||||||
|
|
@ -1,54 +1,31 @@
|
||||||
(ns monger.test.db-test
|
(ns monger.test.db-test
|
||||||
(:require [monger core db]
|
(:require [monger.db :as mdb]
|
||||||
[monger.test.helper :as helper]
|
[monger.core :as mg]
|
||||||
[monger.collection :as mc])
|
[monger.collection :as mc]
|
||||||
|
[clojure.test :refer :all])
|
||||||
(:import [com.mongodb Mongo DB]
|
(:import [com.mongodb Mongo DB]
|
||||||
java.util.Set)
|
java.util.Set))
|
||||||
(:use clojure.test))
|
|
||||||
|
|
||||||
(helper/connect!)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-add-user
|
|
||||||
(let [username "clojurewerkz/monger!"
|
|
||||||
pwd (.toCharArray "monger!")
|
|
||||||
db-name "monger-test4"]
|
|
||||||
;; use a secondary database here. MK.
|
|
||||||
(monger.core/with-db (monger.core/get-db db-name)
|
|
||||||
(monger.db/add-user username pwd)
|
|
||||||
(is (monger.core/authenticate db-name username pwd)))))
|
|
||||||
|
|
||||||
|
|
||||||
;; do not run this test for CI, it complicates matters by messing up
|
;; do not run this test for CI, it complicates matters by messing up
|
||||||
;; authentication for some other tests :( MK.
|
;; authentication for some other tests :( MK.
|
||||||
(when-not (System/getenv "CI")
|
(let [conn (mg/connect)]
|
||||||
(deftest test-drop-database
|
(when-not (System/getenv "CI")
|
||||||
;; drop a secondary database here. MK.
|
(deftest test-drop-database
|
||||||
(monger.core/with-db (monger.core/get-db "monger-test3")
|
;; drop a secondary database here. MK.
|
||||||
(let [collection "test"
|
(let [db (mg/get-db conn "monger-test3")
|
||||||
_ (mc/insert collection {:name "Clojure"})
|
collection "test"
|
||||||
check (mc/count collection)
|
_ (mc/insert db collection {:name "Clojure"})
|
||||||
_ (monger.db/drop-db)]
|
check (mc/count db collection)
|
||||||
|
_ (mdb/drop-db db)]
|
||||||
(is (= 1 check))
|
(is (= 1 check))
|
||||||
(is (not (mc/exists? collection)))
|
(is (not (mc/exists? db collection)))
|
||||||
(is (= 0 (mc/count collection))))))
|
(is (= 0 (mc/count db collection))))))
|
||||||
|
|
||||||
(deftest test-use-database
|
(deftest test-get-collection-names
|
||||||
(monger.core/use-db! "monger-test5")
|
(let [db (mg/get-db conn "monger-test")]
|
||||||
(is (= "monger-test5" (.getName (monger.core/current-db))))
|
(mc/insert db "test-1" {:name "Clojure"})
|
||||||
(let [collection "test"
|
(mc/insert db "test-2" {:name "Clojure"})
|
||||||
_ (mc/insert collection {:name "Clojure"})
|
(let [^Set xs (mdb/get-collection-names db)]
|
||||||
check (mc/count collection)
|
(is (.contains xs "test-1"))
|
||||||
_ (monger.db/drop-db)]
|
(is (.contains xs "test-2"))))))
|
||||||
(is (= 1 check))
|
|
||||||
(is (not (mc/exists? collection)))
|
|
||||||
(is (= 0 (mc/count collection))))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-get-collection-names
|
|
||||||
(mc/insert "test-1" {:name "Clojure"})
|
|
||||||
(mc/insert "test-2" {:name "Clojure"})
|
|
||||||
(let [^Set collections (monger.db/get-collection-names)]
|
|
||||||
(is (.contains collections "test-1"))
|
|
||||||
(is (.contains collections "test-2"))))
|
|
||||||
|
|
|
||||||
|
|
@ -1,155 +0,0 @@
|
||||||
(ns monger.test.factory-dsl-test
|
|
||||||
(:use clojure.test
|
|
||||||
[monger testkit joda-time]
|
|
||||||
monger.test.fixtures
|
|
||||||
[clj-time.core :only [days ago weeks now]])
|
|
||||||
(:require [monger.collection :as mc]
|
|
||||||
[monger.test.helper :as helper])
|
|
||||||
(:import org.bson.types.ObjectId
|
|
||||||
org.joda.time.DateTime))
|
|
||||||
|
|
||||||
|
|
||||||
(helper/connect!)
|
|
||||||
|
|
||||||
(use-fixtures :each purge-domains purge-pages)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(defaults-for "domains"
|
|
||||||
:ipv6-enabled false)
|
|
||||||
|
|
||||||
(let [coll "domains"]
|
|
||||||
(factory coll "clojure"
|
|
||||||
:name "clojure.org"
|
|
||||||
:created-at (-> 2 days ago)
|
|
||||||
:embedded [(embedded-doc "pages" "http://clojure.org/lisp")
|
|
||||||
(embedded-doc "pages" "http://clojure.org/jvm_hosted")
|
|
||||||
(embedded-doc "pages" "http://clojure.org/runtime_polymorphism")])
|
|
||||||
|
|
||||||
(factory coll "elixir"
|
|
||||||
:_id (memoized-oid coll "elixir")
|
|
||||||
:name "elixir-lang.org"
|
|
||||||
:created-at (fn [] (now))
|
|
||||||
:topics (fn [] ["programming" "erlang" "beam" "ruby"])
|
|
||||||
:related {
|
|
||||||
:terms (fn [] ["erlang" "python" "ruby"])
|
|
||||||
}))
|
|
||||||
|
|
||||||
(let [coll "pages"]
|
|
||||||
(factory coll "http://clojure.org/rationale"
|
|
||||||
:name "/rationale"
|
|
||||||
:domain-id (parent-id "domains" "clojure"))
|
|
||||||
(factory coll "http://clojure.org/jvm_hosted"
|
|
||||||
:name "/jvm_hosted")
|
|
||||||
(factory coll "http://clojure.org/runtime_polymorphism"
|
|
||||||
:name "/runtime_polymorphism")
|
|
||||||
(factory coll "http://clojure.org/lisp"
|
|
||||||
:name "/lisp")
|
|
||||||
(factory coll "http://elixir-lang.org/getting_started"
|
|
||||||
:name "/getting_started/1.html"
|
|
||||||
:domain-id (memoized-oid "domains" "elixir")))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-building-documents-from-a-factory-case-1
|
|
||||||
(let [t (-> 2 weeks ago)
|
|
||||||
doc (build "domains" "clojure" :created-at t)]
|
|
||||||
(is (:_id doc))
|
|
||||||
(is (= t (:created-at doc)))
|
|
||||||
(is (= "clojure.org" (:name doc)))
|
|
||||||
(is (false? (:ipv6-enabled doc)))))
|
|
||||||
|
|
||||||
(deftest test-building-documents-from-a-factory-case-2
|
|
||||||
(let [oid (ObjectId.)
|
|
||||||
doc (build "domains" "clojure" :_id oid)]
|
|
||||||
(is (= oid (:_id doc)))
|
|
||||||
(is (= "clojure.org" (:name doc)))
|
|
||||||
(is (false? (:ipv6-enabled doc)))))
|
|
||||||
|
|
||||||
(deftest test-building-documents-from-a-factory-case-3
|
|
||||||
(let [oid (ObjectId.)
|
|
||||||
t (-> 3 weeks ago)
|
|
||||||
doc (build "domains" "clojure" :_id oid :created-at t :name "clojurewerkz.org" :ipv6-enabled true)]
|
|
||||||
(is (= oid (:_id doc)))
|
|
||||||
(is (= t (:created-at doc)))
|
|
||||||
(is (= "clojurewerkz.org" (:name doc)))
|
|
||||||
(is (:ipv6-enabled doc))
|
|
||||||
(is (= ["/lisp" "/jvm_hosted" "/runtime_polymorphism"]
|
|
||||||
(vec (map :name (:embedded doc)))))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-building-documents-from-a-factory-case-4
|
|
||||||
(let [doc (build "domains" "elixir")]
|
|
||||||
(is (:_id doc))
|
|
||||||
(is (= (:_id doc) (memoized-oid "domains" "elixir")))
|
|
||||||
(is (instance? DateTime (:created-at doc)))
|
|
||||||
(is (= ["erlang" "python" "ruby"] (get-in doc [:related :terms])))
|
|
||||||
(is (= "elixir-lang.org" (:name doc)))
|
|
||||||
(is (not (:ipv6-enabled doc)))))
|
|
||||||
|
|
||||||
(deftest test-building-child-documents-with-a-parent-ref-case-1
|
|
||||||
(let [doc (build "pages" "http://clojure.org/rationale")]
|
|
||||||
(is (:domain-id doc))))
|
|
||||||
|
|
||||||
(deftest test-building-child-documents-that-use-memoized-oids-for-parents
|
|
||||||
(let [doc (build "pages" "http://elixir-lang.org/getting_started")]
|
|
||||||
(is (= (:domain-id doc) (memoized-oid "domains" "elixir")))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-seeding-documents-using-a-factory-case-1
|
|
||||||
(is (mc/empty? "domains"))
|
|
||||||
(let [t (-> 2 weeks ago)
|
|
||||||
doc (seed "domains" "clojure" :created-at t)]
|
|
||||||
(is (= 1 (mc/count "domains")))
|
|
||||||
(is (:_id doc))
|
|
||||||
(is (= (:_id doc) (last-oid-of "domains" "clojure")))
|
|
||||||
(is (= t (:created-at doc)))
|
|
||||||
(is (= "clojure.org" (:name doc)))
|
|
||||||
(is (false? (:ipv6-enabled doc)))))
|
|
||||||
|
|
||||||
(deftest test-seeding-documents-using-a-factory-case-2
|
|
||||||
(is (mc/empty? "domains"))
|
|
||||||
(let [doc (seed "domains" "elixir")
|
|
||||||
loaded (first (mc/find-maps "domains"))]
|
|
||||||
(is (= 1 (mc/count "domains")))
|
|
||||||
(is (:_id doc))
|
|
||||||
(is (= (:_id doc) (:_id loaded)))
|
|
||||||
(is (instance? DateTime (:created-at loaded)))
|
|
||||||
(is (= ["erlang" "python" "ruby"] (get-in loaded [:related :terms])))
|
|
||||||
(is (= "elixir-lang.org" (:name loaded)))
|
|
||||||
(is (not (:ipv6-enabled loaded)))))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-seeding-child-documents-with-a-parent-ref-case-1
|
|
||||||
(is (mc/empty? "domains"))
|
|
||||||
(is (mc/empty? "pages"))
|
|
||||||
(let [page (seed "pages" "http://clojure.org/rationale")
|
|
||||||
domain (mc/find-map-by-id "domains" (:domain-id page))]
|
|
||||||
(is (= 1 (mc/count "domains")))
|
|
||||||
(is (= 1 (mc/count "pages")))
|
|
||||||
(is domain)
|
|
||||||
(is (:domain-id page))
|
|
||||||
(is (= "clojure.org" (:name domain)))
|
|
||||||
(is (= "/rationale" (:name page)))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-seeding-all-factories-in-a-group
|
|
||||||
(is (mc/empty? "domains"))
|
|
||||||
(is (mc/empty? "pages"))
|
|
||||||
(seed-all "pages")
|
|
||||||
(is (>= (mc/count "domains") 1))
|
|
||||||
(is (>= (mc/count "pages") 4)))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-named-memoized-object-ids
|
|
||||||
(let [oid1 (memoized-oid "domains" "clojure.org")
|
|
||||||
oid2 (memoized-oid "domains" "python.org")]
|
|
||||||
(is (= oid1 (memoized-oid "domains" "clojure.org")))
|
|
||||||
(is (= oid1 (memoized-oid "domains" "clojure.org")))
|
|
||||||
(is (= oid1 (memoized-oid "domains" "clojure.org")))
|
|
||||||
(is (= oid1 (memoized-oid "domains" "clojure.org")))
|
|
||||||
(is (not (= oid1 oid2)))
|
|
||||||
(is (= oid2 (memoized-oid "domains" "python.org")))
|
|
||||||
(is (= oid2 (memoized-oid "domains" "python.org")))
|
|
||||||
(is (= oid2 (memoized-oid "domains" "python.org")))))
|
|
||||||
|
|
@ -1,20 +0,0 @@
|
||||||
(ns monger.test.fixtures
|
|
||||||
(:require [monger.collection :as mgcol])
|
|
||||||
(:use monger.testkit))
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; fixture functions
|
|
||||||
;;
|
|
||||||
|
|
||||||
(defcleaner people "people")
|
|
||||||
(defcleaner docs "docs")
|
|
||||||
(defcleaner things "things")
|
|
||||||
(defcleaner libraries "libraries")
|
|
||||||
(defcleaner scores "scores")
|
|
||||||
(defcleaner locations "locations")
|
|
||||||
(defcleaner domains "domains")
|
|
||||||
(defcleaner pages "pages")
|
|
||||||
|
|
||||||
(defcleaner cached "cached")
|
|
||||||
|
|
||||||
(defcleaner migrations "meta.migrations")
|
|
||||||
28
test/monger/test/full_text_search_test.clj
Normal file
28
test/monger/test/full_text_search_test.clj
Normal file
|
|
@ -0,0 +1,28 @@
|
||||||
|
(ns monger.test.full-text-search-test
|
||||||
|
(:require [monger.core :as mg]
|
||||||
|
[monger.collection :as mc]
|
||||||
|
[monger.command :as cmd]
|
||||||
|
[monger.operators :refer :all]
|
||||||
|
[clojure.test :refer [deftest is use-fixtures]]
|
||||||
|
[monger.result :refer [acknowledged?]])
|
||||||
|
(:import com.mongodb.BasicDBObjectBuilder))
|
||||||
|
|
||||||
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")
|
||||||
|
coll "search-docs"]
|
||||||
|
|
||||||
|
(defn purge-collections
|
||||||
|
[f]
|
||||||
|
(mc/purge-many db [coll])
|
||||||
|
(f)
|
||||||
|
(mc/purge-many db [coll]))
|
||||||
|
|
||||||
|
(use-fixtures :each purge-collections)
|
||||||
|
|
||||||
|
(deftest ^{:search true} test-basic-full-text-search-query
|
||||||
|
(mc/ensure-index db coll (array-map :subject "text" :content "text"))
|
||||||
|
(mc/insert db coll {:subject "hello there" :content "this should be searchable"})
|
||||||
|
(mc/insert db coll {:subject "untitled" :content "this is just noize"})
|
||||||
|
(let [xs (mc/find-maps db coll {$text {$search "hello"}})]
|
||||||
|
(is (= 1 (count xs)))
|
||||||
|
(is (= "hello there" (-> xs first :subject))))))
|
||||||
|
|
@ -1,166 +1,212 @@
|
||||||
(ns monger.test.gridfs-test
|
(ns monger.test.gridfs-test
|
||||||
(:refer-clojure :exclude [count remove find])
|
(:refer-clojure :exclude [count remove find])
|
||||||
(:use clojure.test
|
|
||||||
[monger.core :only [count]]
|
|
||||||
monger.test.fixtures
|
|
||||||
[monger operators conversion]
|
|
||||||
[monger.gridfs :only (store make-input-file store-file filename content-type metadata)])
|
|
||||||
(:require [monger.gridfs :as gridfs]
|
(:require [monger.gridfs :as gridfs]
|
||||||
[monger.test.helper :as helper]
|
[clojure.java.io :as io]
|
||||||
[clojure.java.io :as io])
|
[clojure.test :refer :all]
|
||||||
|
[monger.core :as mg :refer [count]]
|
||||||
|
[monger.operators :refer :all]
|
||||||
|
[monger.conversion :refer :all]
|
||||||
|
[monger.gridfs :refer [store make-input-file store-file filename content-type metadata]])
|
||||||
(:import [java.io InputStream File FileInputStream]
|
(:import [java.io InputStream File FileInputStream]
|
||||||
[com.mongodb.gridfs GridFS GridFSInputFile GridFSDBFile]))
|
[com.mongodb.gridfs GridFS GridFSInputFile GridFSDBFile]))
|
||||||
|
|
||||||
(defn purge-gridfs*
|
(let [conn (mg/connect)
|
||||||
[]
|
db (mg/get-db conn "monger-test")
|
||||||
(gridfs/remove-all))
|
fs (mg/get-gridfs conn "monger-test")]
|
||||||
|
(defn purge-gridfs*
|
||||||
|
[]
|
||||||
|
(gridfs/remove-all fs))
|
||||||
|
|
||||||
(defn purge-gridfs
|
(defn purge-gridfs
|
||||||
[f]
|
[f]
|
||||||
(gridfs/remove-all)
|
(gridfs/remove-all fs)
|
||||||
(f)
|
(f)
|
||||||
(gridfs/remove-all))
|
(gridfs/remove-all fs))
|
||||||
|
|
||||||
(use-fixtures :each purge-gridfs)
|
(use-fixtures :each purge-gridfs)
|
||||||
|
|
||||||
(helper/connect!)
|
(deftest ^{:gridfs true} test-storing-files-to-gridfs-using-relative-fs-paths
|
||||||
|
(let [input "./test/resources/mongo/js/mapfun1.js"]
|
||||||
|
(is (= 0 (count (gridfs/all-files fs))))
|
||||||
|
(store (make-input-file fs input)
|
||||||
|
(.setFilename "monger.test.gridfs.file1")
|
||||||
|
(.setContentType "application/octet-stream"))
|
||||||
|
(is (= 1 (count (gridfs/all-files fs))))))
|
||||||
|
|
||||||
|
|
||||||
|
(deftest ^{:gridfs true} test-storing-files-to-gridfs-using-file-instances
|
||||||
|
(let [input (io/as-file "./test/resources/mongo/js/mapfun1.js")]
|
||||||
|
(is (= 0 (count (gridfs/all-files fs))))
|
||||||
|
(store-file (make-input-file fs input)
|
||||||
|
(filename "monger.test.gridfs.file2")
|
||||||
|
(content-type "application/octet-stream"))
|
||||||
|
(is (= 1 (count (gridfs/all-files fs))))))
|
||||||
|
|
||||||
|
(deftest ^{:gridfs true} test-storing-bytes-to-gridfs
|
||||||
|
(let [input (.getBytes "A string")
|
||||||
|
md {:format "raw" :source "AwesomeCamera D95"}
|
||||||
|
fname "monger.test.gridfs.file3"
|
||||||
|
ct "application/octet-stream"]
|
||||||
|
(is (= 0 (count (gridfs/all-files fs))))
|
||||||
|
(store-file (make-input-file fs input)
|
||||||
|
(filename fname)
|
||||||
|
(metadata md)
|
||||||
|
(content-type "application/octet-stream"))
|
||||||
|
(let [f (first (gridfs/files-as-maps fs))]
|
||||||
|
(is (= ct (:contentType f)))
|
||||||
|
(is (= fname (:filename f)))
|
||||||
|
(is (= md (:metadata f))))
|
||||||
|
(is (= 1 (count (gridfs/all-files fs))))))
|
||||||
|
|
||||||
|
(deftest ^{:gridfs true} test-storing-files-to-gridfs-using-absolute-fs-paths
|
||||||
|
(let [tmp-file (File/createTempFile "monger.test.gridfs" "test-storing-files-to-gridfs-using-absolute-fs-paths")
|
||||||
|
_ (spit tmp-file "Some content")
|
||||||
|
input (.getAbsolutePath tmp-file)]
|
||||||
|
(is (= 0 (count (gridfs/all-files fs))))
|
||||||
|
(store-file (make-input-file fs input)
|
||||||
|
(filename "monger.test.gridfs.file4")
|
||||||
|
(content-type "application/octet-stream"))
|
||||||
|
(is (= 1 (count (gridfs/all-files fs))))))
|
||||||
|
|
||||||
|
(deftest ^{:gridfs true} test-storing-files-to-gridfs-using-input-stream
|
||||||
|
(let [tmp-file (File/createTempFile "monger.test.gridfs" "test-storing-files-to-gridfs-using-input-stream")
|
||||||
|
_ (spit tmp-file "Some other content")]
|
||||||
|
(is (= 0 (count (gridfs/all-files fs))))
|
||||||
|
(store-file fs
|
||||||
|
(make-input-file (FileInputStream. tmp-file))
|
||||||
|
(filename "monger.test.gridfs.file4b")
|
||||||
|
(content-type "application/octet-stream"))
|
||||||
|
(is (= 1 (count (gridfs/all-files fs))))))
|
||||||
|
|
||||||
|
(deftest ^{:gridfs true} test-deleting-file-instance-on-disk-after-storing
|
||||||
|
(let [tmp-file (File/createTempFile "monger.test.gridfs" "test-deleting-file-instance-on-disk-after-storing")
|
||||||
|
_ (spit tmp-file "to be deleted")]
|
||||||
|
(is (= 0 (count (gridfs/all-files fs))))
|
||||||
|
(store-file (make-input-file fs tmp-file)
|
||||||
|
(filename "test-deleting-file-instance-on-disk-after-storing")
|
||||||
|
(content-type "application/octet-stream"))
|
||||||
|
(is (= 1 (count (gridfs/all-files fs))))
|
||||||
|
(is (.delete tmp-file))))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(deftest ^{:gridfs true} test-storing-files-to-gridfs-using-relative-fs-paths
|
(deftest ^{:gridfs true} test-finding-individual-files-on-gridfs
|
||||||
(let [input "./test/resources/mongo/js/mapfun1.js"]
|
(testing "gridfs/find-one"
|
||||||
(is (= 0 (count (gridfs/all-files))))
|
(purge-gridfs*)
|
||||||
(store (make-input-file input)
|
(let [input "./test/resources/mongo/js/mapfun1.js"
|
||||||
(.setFilename "monger.test.gridfs.file1")
|
ct "binary/octet-stream"
|
||||||
(.setContentType "application/octet-stream"))
|
fname "monger.test.gridfs.file5"
|
||||||
(is (= 1 (count (gridfs/all-files))))))
|
md5 "14a09deabb50925a3381315149017bbd"
|
||||||
|
stored (store-file (make-input-file fs input)
|
||||||
|
(filename fname)
|
||||||
|
(content-type ct))]
|
||||||
|
(is (= 1 (count (gridfs/all-files fs))))
|
||||||
|
(is (:_id stored))
|
||||||
|
(is (:uploadDate stored))
|
||||||
|
(is (= 62 (:length stored)))
|
||||||
|
(is (= md5 (:md5 stored)))
|
||||||
|
(is (= fname (:filename stored)))
|
||||||
|
(is (= ct (:contentType stored)))
|
||||||
|
(are [a b] (is (= a (:md5 (from-db-object (gridfs/find-one fs b) true))))
|
||||||
|
md5 {:_id (:_id stored)}
|
||||||
|
md5 (to-db-object {:md5 md5}))))
|
||||||
|
(testing "gridfs/find-one-as-map"
|
||||||
|
(purge-gridfs*)
|
||||||
|
(let [input "./test/resources/mongo/js/mapfun1.js"
|
||||||
|
ct "binary/octet-stream"
|
||||||
|
fname "monger.test.gridfs.file6"
|
||||||
|
md5 "14a09deabb50925a3381315149017bbd"
|
||||||
|
stored (store-file (make-input-file fs input)
|
||||||
|
(filename fname)
|
||||||
|
(metadata (to-db-object {:meta "data"}))
|
||||||
|
(content-type ct))]
|
||||||
|
(is (= 1 (count (gridfs/all-files fs))))
|
||||||
|
(is (:_id stored))
|
||||||
|
(is (:uploadDate stored))
|
||||||
|
(is (= 62 (:length stored)))
|
||||||
|
(is (= md5 (:md5 stored)))
|
||||||
|
(is (= fname (:filename stored)))
|
||||||
|
(is (= ct (:contentType stored)))
|
||||||
|
(let [m (gridfs/find-one-as-map fs {:filename fname})]
|
||||||
|
(is (= {:meta "data"} (:metadata m))))
|
||||||
|
(are [a query] (is (= a (:md5 (gridfs/find-one-as-map fs query))))
|
||||||
|
md5 {:_id (:_id stored)}
|
||||||
|
md5 {:md5 md5})))
|
||||||
|
(testing "gridfs/find-by-id"
|
||||||
|
(purge-gridfs*)
|
||||||
|
(let [input "./test/resources/mongo/js/mapfun1.js"
|
||||||
|
ct "binary/octet-stream"
|
||||||
|
fname "monger.test.gridfs.file5"
|
||||||
|
md5 "14a09deabb50925a3381315149017bbd"
|
||||||
|
stored (store-file (make-input-file fs input)
|
||||||
|
(filename fname)
|
||||||
|
(content-type ct))]
|
||||||
|
(is (= 1 (count (gridfs/all-files fs))))
|
||||||
|
(is (:_id stored))
|
||||||
|
(is (:uploadDate stored))
|
||||||
|
(is (= 62 (:length stored)))
|
||||||
|
(is (= md5 (:md5 stored)))
|
||||||
|
(is (= fname (:filename stored)))
|
||||||
|
(is (= ct (:contentType stored)))
|
||||||
|
(are [a id] (is (= a (:md5 (from-db-object (gridfs/find-by-id fs id) true))))
|
||||||
|
md5 (:_id stored))))
|
||||||
|
(testing "gridfs/find-map-by-id"
|
||||||
|
(purge-gridfs*)
|
||||||
|
(let [input "./test/resources/mongo/js/mapfun1.js"
|
||||||
|
ct "binary/octet-stream"
|
||||||
|
fname "monger.test.gridfs.file6"
|
||||||
|
md5 "14a09deabb50925a3381315149017bbd"
|
||||||
|
stored (store-file (make-input-file fs input)
|
||||||
|
(filename fname)
|
||||||
|
(metadata (to-db-object {:meta "data"}))
|
||||||
|
(content-type ct))]
|
||||||
|
(is (= 1 (count (gridfs/all-files fs))))
|
||||||
|
(is (:_id stored))
|
||||||
|
(is (:uploadDate stored))
|
||||||
|
(is (= 62 (:length stored)))
|
||||||
|
(is (= md5 (:md5 stored)))
|
||||||
|
(is (= fname (:filename stored)))
|
||||||
|
(is (= ct (:contentType stored)))
|
||||||
|
(let [m (gridfs/find-map-by-id fs (:_id stored))]
|
||||||
|
(is (= {:meta "data"} (:metadata m))))
|
||||||
|
(are [a id] (is (= a (:md5 (gridfs/find-map-by-id fs id))))
|
||||||
|
md5 (:_id stored)))))
|
||||||
|
|
||||||
|
(deftest ^{:gridfs true} test-finding-multiple-files-on-gridfs
|
||||||
(deftest ^{:gridfs true} test-storing-files-to-gridfs-using-file-instances
|
|
||||||
(let [input (io/as-file "./test/resources/mongo/js/mapfun1.js")]
|
|
||||||
(is (= 0 (count (gridfs/all-files))))
|
|
||||||
(store-file (make-input-file input)
|
|
||||||
(filename "monger.test.gridfs.file2")
|
|
||||||
(content-type "application/octet-stream"))
|
|
||||||
(is (= 1 (count (gridfs/all-files))))))
|
|
||||||
|
|
||||||
(deftest ^{:gridfs true} test-storing-bytes-to-gridfs
|
|
||||||
(let [input (.getBytes "A string")
|
|
||||||
md {:format "raw" :source "AwesomeCamera D95"}
|
|
||||||
fname "monger.test.gridfs.file3"
|
|
||||||
ct "application/octet-stream"]
|
|
||||||
(is (= 0 (count (gridfs/all-files))))
|
|
||||||
(store-file (make-input-file input)
|
|
||||||
(filename fname)
|
|
||||||
(metadata md)
|
|
||||||
(content-type "application/octet-stream"))
|
|
||||||
(let [f (first (gridfs/files-as-maps))]
|
|
||||||
(is (= ct (:contentType f)))
|
|
||||||
(is (= fname (:filename f)))
|
|
||||||
(is (= md (:metadata f))))
|
|
||||||
(is (= 1 (count (gridfs/all-files))))))
|
|
||||||
|
|
||||||
(deftest ^{:gridfs true} test-storing-files-to-gridfs-using-absolute-fs-paths
|
|
||||||
(let [tmp-file (File/createTempFile "monger.test.gridfs" "test-storing-files-to-gridfs-using-absolute-fs-paths")
|
|
||||||
_ (spit tmp-file "Some content")
|
|
||||||
input (.getAbsolutePath tmp-file)]
|
|
||||||
(is (= 0 (count (gridfs/all-files))))
|
|
||||||
(store-file (make-input-file input)
|
|
||||||
(filename "monger.test.gridfs.file4")
|
|
||||||
(content-type "application/octet-stream"))
|
|
||||||
(is (= 1 (count (gridfs/all-files))))))
|
|
||||||
|
|
||||||
(deftest ^{:gridfs true} test-storing-files-to-gridfs-using-input-stream
|
|
||||||
(let [tmp-file (File/createTempFile "monger.test.gridfs" "test-storing-files-to-gridfs-using-input-stream")
|
|
||||||
_ (spit tmp-file "Some other content")]
|
|
||||||
(is (= 0 (count (gridfs/all-files))))
|
|
||||||
(store-file (make-input-file (FileInputStream. tmp-file))
|
|
||||||
(filename "monger.test.gridfs.file4b")
|
|
||||||
(content-type "application/octet-stream"))
|
|
||||||
(is (= 1 (count (gridfs/all-files))))))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(deftest ^{:gridfs true} test-finding-individual-files-on-gridfs
|
|
||||||
(testing "gridfs/find-one"
|
|
||||||
(purge-gridfs*)
|
|
||||||
(let [input "./test/resources/mongo/js/mapfun1.js"
|
|
||||||
ct "binary/octet-stream"
|
|
||||||
fname "monger.test.gridfs.file5"
|
|
||||||
md5 "14a09deabb50925a3381315149017bbd"
|
|
||||||
stored (store-file (make-input-file input)
|
|
||||||
(filename fname)
|
|
||||||
(content-type ct))]
|
|
||||||
(is (= 1 (count (gridfs/all-files))))
|
|
||||||
(is (:_id stored))
|
|
||||||
(is (:uploadDate stored))
|
|
||||||
(is (= 62 (:length stored)))
|
|
||||||
(is (= md5 (:md5 stored)))
|
|
||||||
(is (= fname (:filename stored)))
|
|
||||||
(is (= ct (:contentType stored)))
|
|
||||||
(are [a b] (is (= a (:md5 (from-db-object (gridfs/find-one b) true))))
|
|
||||||
md5 (:_id stored)
|
|
||||||
md5 fname
|
|
||||||
md5 (to-db-object {:md5 md5}))))
|
|
||||||
(testing "gridfs/find-one-as-map"
|
|
||||||
(purge-gridfs*)
|
|
||||||
(let [input "./test/resources/mongo/js/mapfun1.js"
|
(let [input "./test/resources/mongo/js/mapfun1.js"
|
||||||
ct "binary/octet-stream"
|
ct "binary/octet-stream"
|
||||||
fname "monger.test.gridfs.file6"
|
|
||||||
md5 "14a09deabb50925a3381315149017bbd"
|
md5 "14a09deabb50925a3381315149017bbd"
|
||||||
stored (store-file (make-input-file input)
|
stored1 (store-file (make-input-file fs input)
|
||||||
(filename fname)
|
(filename "monger.test.gridfs.file6")
|
||||||
(metadata (to-db-object {:meta "data"}))
|
(content-type ct))
|
||||||
(content-type ct))]
|
stored2 (store-file (make-input-file fs input)
|
||||||
(is (= 1 (count (gridfs/all-files))))
|
(filename "monger.test.gridfs.file7")
|
||||||
(is (:_id stored))
|
(content-type ct))
|
||||||
(is (:uploadDate stored))
|
list1 (gridfs/find-by-filename fs "monger.test.gridfs.file6")
|
||||||
(is (= 62 (:length stored)))
|
list2 (gridfs/find-by-filename fs "monger.test.gridfs.file7")
|
||||||
(is (= md5 (:md5 stored)))
|
list3 (gridfs/find-by-filename fs "888000___.monger.test.gridfs.file")
|
||||||
(is (= fname (:filename stored)))
|
list4 (gridfs/find-by-md5 fs md5)]
|
||||||
(is (= ct (:contentType stored)))
|
(is (= 2 (count (gridfs/all-files fs))))
|
||||||
(let [m (gridfs/find-one-as-map {:filename fname})]
|
(are [a b] (is (= (map #(.get ^GridFSDBFile % "_id") a)
|
||||||
(is (= {:meta "data"} (:metadata m))))
|
(map :_id b)))
|
||||||
(are [a query] (is (= a (:md5 (gridfs/find-one-as-map query))))
|
list1 [stored1]
|
||||||
md5 (:_id stored)
|
list2 [stored2]
|
||||||
md5 fname
|
list3 []
|
||||||
md5 {:md5 md5}))))
|
list4 [stored1 stored2])))
|
||||||
|
|
||||||
(deftest ^{:gridfs true} test-finding-multiple-files-on-gridfs
|
|
||||||
(let [input "./test/resources/mongo/js/mapfun1.js"
|
|
||||||
ct "binary/octet-stream"
|
|
||||||
md5 "14a09deabb50925a3381315149017bbd"
|
|
||||||
stored1 (store-file (make-input-file input)
|
|
||||||
(filename "monger.test.gridfs.file6")
|
|
||||||
(content-type ct))
|
|
||||||
stored2 (store-file (make-input-file input)
|
|
||||||
(filename "monger.test.gridfs.file7")
|
|
||||||
(content-type ct))
|
|
||||||
list1 (gridfs/find "monger.test.gridfs.file6")
|
|
||||||
list2 (gridfs/find "monger.test.gridfs.file7")
|
|
||||||
list3 (gridfs/find "888000___.monger.test.gridfs.file")
|
|
||||||
list4 (gridfs/find { :md5 md5 })]
|
|
||||||
(is (= 2 (count (gridfs/all-files))))
|
|
||||||
(are [a b] (is (= (map #(.get ^GridFSDBFile % "_id") a)
|
|
||||||
(map :_id b)))
|
|
||||||
list1 [stored1]
|
|
||||||
list2 [stored2]
|
|
||||||
list3 []
|
|
||||||
list4 [stored1 stored2])))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest ^{:gridfs true} test-removing-multiple-files-from-gridfs
|
(deftest ^{:gridfs true} test-removing-multiple-files-from-gridfs
|
||||||
(let [input "./test/resources/mongo/js/mapfun1.js"
|
(let [input "./test/resources/mongo/js/mapfun1.js"
|
||||||
ct "binary/octet-stream"
|
ct "binary/octet-stream"
|
||||||
md5 "14a09deabb50925a3381315149017bbd"
|
md5 "14a09deabb50925a3381315149017bbd"
|
||||||
stored1 (store-file (make-input-file input)
|
stored1 (store-file (make-input-file fs input)
|
||||||
(filename "monger.test.gridfs.file8")
|
(filename "monger.test.gridfs.file8")
|
||||||
(content-type ct))
|
(content-type ct))
|
||||||
stored2 (store-file (make-input-file input)
|
stored2 (store-file (make-input-file fs input)
|
||||||
(filename "monger.test.gridfs.file9")
|
(filename "monger.test.gridfs.file9")
|
||||||
(content-type ct))]
|
(content-type ct))]
|
||||||
(is (= 2 (count (gridfs/all-files))))
|
(is (= 2 (count (gridfs/all-files fs))))
|
||||||
(gridfs/remove { :filename "monger.test.gridfs.file8" })
|
(gridfs/remove fs { :filename "monger.test.gridfs.file8" })
|
||||||
(is (= 1 (count (gridfs/all-files))))
|
(is (= 1 (count (gridfs/all-files fs))))
|
||||||
(gridfs/remove { :md5 md5 })
|
(gridfs/remove fs { :md5 md5 })
|
||||||
(is (= 0 (count (gridfs/all-files))))))
|
(is (= 0 (count (gridfs/all-files fs)))))))
|
||||||
|
|
|
||||||
|
|
@ -1,17 +0,0 @@
|
||||||
(ns monger.test.helper
|
|
||||||
(:require [monger core util])
|
|
||||||
(:import [com.mongodb WriteConcern]))
|
|
||||||
|
|
||||||
(def connected (atom false))
|
|
||||||
(defn connected?
|
|
||||||
[]
|
|
||||||
@connected)
|
|
||||||
|
|
||||||
(defn connect!
|
|
||||||
[]
|
|
||||||
(when-not (connected?)
|
|
||||||
(do
|
|
||||||
(monger.core/connect!)
|
|
||||||
(monger.core/set-db! (monger.core/get-db "monger-test"))
|
|
||||||
(monger.core/set-default-write-concern! WriteConcern/SAFE)
|
|
||||||
(reset! connected true))))
|
|
||||||
|
|
@ -1,33 +1,49 @@
|
||||||
(ns monger.test.indexing-test
|
(ns monger.test.indexing-test
|
||||||
(:import org.bson.types.ObjectId
|
(:import org.bson.types.ObjectId
|
||||||
java.util.Date)
|
java.util.Date)
|
||||||
(:require [monger core util]
|
(:require [monger.core :as mg]
|
||||||
[monger.collection :as mc]
|
[monger.collection :as mc]
|
||||||
[monger.test.helper :as helper])
|
monger.joda-time
|
||||||
(:use clojure.test
|
[clojure.test :refer :all]
|
||||||
[monger operators conversion]
|
[clj-time.core :refer [now seconds ago from-now]]))
|
||||||
monger.test.fixtures))
|
|
||||||
|
|
||||||
(helper/connect!)
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")]
|
||||||
|
(deftest ^{:indexing true} test-creating-and-dropping-indexes
|
||||||
|
(let [collection "libraries"]
|
||||||
|
(mc/drop-indexes db collection)
|
||||||
|
(mc/create-index db collection {"language" 1})
|
||||||
|
(is (= "language_1"
|
||||||
|
(:name (second (mc/indexes-on db collection)))))
|
||||||
|
(mc/drop-indexes db collection)
|
||||||
|
(is (nil? (second (mc/indexes-on db collection))))
|
||||||
|
(mc/ensure-index db collection (array-map "language" 1) {:unique true})
|
||||||
|
(is (= "language_1"
|
||||||
|
(:name (second (mc/indexes-on db collection)))))
|
||||||
|
(mc/drop-indexes db collection)
|
||||||
|
(mc/ensure-index db collection (array-map "language" 1))
|
||||||
|
(mc/drop-indexes db collection)
|
||||||
|
(mc/ensure-index db collection (array-map "language" 1) {:unique true})
|
||||||
|
(mc/drop-indexes db collection)
|
||||||
|
(mc/ensure-index db collection (array-map "language" 1) "index-name" true)
|
||||||
|
(mc/drop-indexes db collection)))
|
||||||
|
|
||||||
|
(deftest ^{:indexing true :time-consuming true} test-ttl-collections
|
||||||
;;
|
(let [coll "recent_events"
|
||||||
;; indexes
|
ttl 15
|
||||||
;;
|
sleep 65]
|
||||||
|
(mc/remove db coll)
|
||||||
(deftest ^{:indexing true} test-creating-and-dropping-indexes
|
(mc/drop-indexes db coll)
|
||||||
(let [collection "libraries"]
|
(mc/ensure-index db coll (array-map :created-at 1) {:expireAfterSeconds ttl})
|
||||||
(mc/drop-indexes collection)
|
(dotimes [i 100]
|
||||||
(mc/create-index collection { "language" 1 })
|
(mc/insert db coll {:type "signup" :created-at (-> i seconds ago) :i i}))
|
||||||
(is (= "language_1"
|
(dotimes [i 100]
|
||||||
(:name (second (mc/indexes-on collection)))))
|
(mc/insert db coll {:type "signup" :created-at (-> i seconds from-now) :i i}))
|
||||||
(mc/drop-index collection "language_1")
|
(is (= 200 (mc/count db coll {:type "signup"})))
|
||||||
(mc/create-index collection ["language"])
|
;; sleep for > 60 seconds. MongoDB seems to run TTLMonitor once per minute, according to
|
||||||
(mc/drop-index collection "language_1")
|
;; the log.
|
||||||
(is (nil? (second (mc/indexes-on collection))))
|
(println (format "Now sleeping for %d seconds to test TTL collections!" sleep))
|
||||||
(mc/ensure-index collection { "language" 1 } {:unique true})
|
(Thread/sleep (* sleep 1000))
|
||||||
(is (= "language_1"
|
(println (format "Documents in the TTL collection: %d" (mc/count db coll {:type "signup"})))
|
||||||
(:name (second (mc/indexes-on collection)))))
|
(is (< (mc/count db coll {:type "signup"}) 100))
|
||||||
(mc/ensure-index collection { "language" 1 })
|
(mc/remove db coll))))
|
||||||
(mc/ensure-index collection { "language" 1 } { :unique true })
|
|
||||||
(mc/drop-indexes collection)))
|
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,3 @@
|
||||||
(set! *warn-on-reflection* true)
|
|
||||||
|
|
||||||
(ns monger.test.inserting-test
|
(ns monger.test.inserting-test
|
||||||
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject DBRef]
|
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject DBRef]
|
||||||
org.bson.types.ObjectId
|
org.bson.types.ObjectId
|
||||||
|
|
@ -7,119 +5,176 @@
|
||||||
(:require [monger.core :as mg]
|
(:require [monger.core :as mg]
|
||||||
[monger.util :as mu]
|
[monger.util :as mu]
|
||||||
[monger.collection :as mc]
|
[monger.collection :as mc]
|
||||||
[monger.test.helper :as helper])
|
[clojure.test :refer :all]
|
||||||
(:use clojure.test
|
[monger.operators :refer :all]
|
||||||
monger.operators
|
[monger.conversion :refer :all]))
|
||||||
monger.conversion
|
|
||||||
monger.test.fixtures))
|
|
||||||
|
|
||||||
(helper/connect!)
|
|
||||||
|
|
||||||
(use-fixtures :each purge-people purge-docs purge-things purge-libraries)
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; insert
|
|
||||||
;;
|
|
||||||
|
|
||||||
(deftest insert-a-basic-document-without-id-and-with-default-write-concern
|
|
||||||
(let [collection "people"
|
|
||||||
doc {:name "Joe" :age 30}]
|
|
||||||
(is (monger.result/ok? (mc/insert "people" doc)))
|
|
||||||
(is (= 1 (mc/count collection)))))
|
|
||||||
|
|
||||||
(deftest insert-a-basic-document-with-explicitly-passed-database-without-id-and-with-default-write-concern
|
|
||||||
(let [collection "people"
|
|
||||||
doc {:name "Joe" :age 30}]
|
|
||||||
(dotimes [n 5]
|
|
||||||
(is (monger.result/ok? (mc/insert monger.core/*mongodb-database* "people" doc WriteConcern/SAFE))))
|
|
||||||
(is (= 5 (mc/count collection)))))
|
|
||||||
|
|
||||||
(deftest insert-a-basic-document-without-id-and-with-explicit-write-concern
|
|
||||||
(let [collection "people"
|
|
||||||
doc {:name "Joe" :age 30}]
|
|
||||||
(is (monger.result/ok? (mc/insert "people" doc WriteConcern/SAFE)))
|
|
||||||
(is (= 1 (mc/count collection)))))
|
|
||||||
|
|
||||||
(deftest insert-a-basic-db-object-without-id-and-with-default-write-concern
|
|
||||||
(let [collection "people"
|
|
||||||
doc (to-db-object {:name "Joe" :age 30})]
|
|
||||||
(is (nil? (.get ^DBObject doc "_id")))
|
|
||||||
(mc/insert "people" doc)
|
|
||||||
(is (not (nil? (monger.util/get-id doc))))))
|
|
||||||
|
|
||||||
(deftest insert-a-map-with-id-and-with-default-write-concern
|
|
||||||
(let [collection "people"
|
|
||||||
id (ObjectId.)
|
|
||||||
doc {:name "Joe" :age 30 "_id" id}
|
|
||||||
result (mc/insert "people" doc)]
|
|
||||||
(is (= id (monger.util/get-id doc)))))
|
|
||||||
|
|
||||||
(deftest insert-a-document-with-clojure-ratio-in-it
|
|
||||||
(let [collection "widgets"
|
|
||||||
id (ObjectId.)
|
|
||||||
doc {:ratio 11/2 "_id" id}
|
|
||||||
result (mc/insert "widgets" doc)]
|
|
||||||
(is (= 5.5 (:ratio (mc/find-map-by-id collection id))))))
|
|
||||||
|
|
||||||
(deftest insert-a-document-with-clojure-keyword-in-it
|
|
||||||
(let [collection "widgets"
|
|
||||||
id (ObjectId.)
|
|
||||||
doc {:keyword :kwd "_id" id}
|
|
||||||
result (mc/insert "widgets" doc)]
|
|
||||||
(is (= (name :kwd) (:keyword (mc/find-map-by-id collection id))))))
|
|
||||||
|
|
||||||
(deftest insert-a-document-with-clojure-keyword-in-a-set-in-it
|
|
||||||
(let [collection "widgets"
|
|
||||||
id (ObjectId.)
|
|
||||||
doc {:keyword1 {:keyword2 #{:kw1 :kw2}} "_id" id}
|
|
||||||
result (mc/insert "widgets" doc)]
|
|
||||||
(is (= (sort ["kw1" "kw2"])
|
|
||||||
(sort (get-in (mc/find-map-by-id collection id) [:keyword1 :keyword2]))))))
|
|
||||||
|
|
||||||
|
|
||||||
(defrecord Metrics
|
(defrecord Metrics
|
||||||
[rps eps])
|
[rps eps])
|
||||||
|
|
||||||
(deftest insert-a-document-with-clojure-record-in-it
|
(let [conn (mg/connect)
|
||||||
(let [collection "widgets"
|
db (mg/get-db conn "monger-test")]
|
||||||
id (ObjectId.)
|
(defn purge-collections
|
||||||
doc {:record (Metrics. 10 20) "_id" id}
|
[f]
|
||||||
result (mc/insert "widgets" doc)]
|
(mc/remove db "people")
|
||||||
(is (= {:rps 10 :eps 20} (:record (mc/find-map-by-id collection id))))))
|
(mc/remove db "docs")
|
||||||
|
(mc/remove db "things")
|
||||||
|
(mc/remove db "widgets")
|
||||||
|
(f)
|
||||||
|
(mc/remove db "people")
|
||||||
|
(mc/remove db "docs")
|
||||||
|
(mc/remove db "things")
|
||||||
|
(mc/remove db "widgets"))
|
||||||
|
|
||||||
(deftest test-insert-a-document-with-dbref
|
(use-fixtures :each purge-collections)
|
||||||
(let [coll1 "widgets"
|
|
||||||
coll2 "owners"
|
|
||||||
oid (ObjectId.)
|
|
||||||
joe (mc/insert "owners" {:name "Joe" :_id oid})
|
|
||||||
dbref (DBRef. (mg/current-db) coll2 oid)]
|
|
||||||
(mc/insert coll1 {:type "pentagon" :owner dbref})
|
|
||||||
(let [fetched (mc/find-one-as-map coll1 {:type "pentagon"})
|
|
||||||
fo (:owner fetched)]
|
|
||||||
(is (= {:_id oid :name "Joe"} (from-db-object @fo true)))
|
|
||||||
(is (= dbref fo)))))
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; insert-batch
|
;; insert
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(deftest insert-a-batch-of-basic-documents-without-ids-and-with-default-write-concern
|
(deftest insert-a-basic-document-without-id-and-with-default-write-concern
|
||||||
(let [collection "people"
|
(let [collection "people"
|
||||||
docs [{:name "Joe" :age 30} {:name "Paul" :age 27}]]
|
doc {:name "Joe" :age 30}]
|
||||||
(is (monger.result/ok? (mc/insert-batch "people" docs)))
|
(is (mc/insert db collection doc))
|
||||||
(is (= 2 (mc/count collection)))))
|
(is (= 1 (mc/count db collection)))))
|
||||||
|
|
||||||
(deftest insert-a-batch-of-basic-documents-without-ids-and-with-explicit-write-concern
|
(deftest insert-a-basic-document-with-explicitly-passed-database-without-id-and-with-default-write-concern
|
||||||
(let [collection "people"
|
(let [collection "people"
|
||||||
docs [{:name "Joe" :age 30} {:name "Paul" :age 27}]]
|
doc {:name "Joe" :age 30}]
|
||||||
(is (monger.result/ok? (mc/insert-batch "people" docs WriteConcern/NORMAL)))
|
(dotimes [n 5]
|
||||||
(is (= 2 (mc/count collection)))))
|
(mc/insert db collection doc WriteConcern/SAFE))
|
||||||
|
(is (= 5 (mc/count db collection)))))
|
||||||
|
|
||||||
(deftest insert-a-batch-of-basic-documents-with-explicit-database-without-ids-and-with-explicit-write-concern
|
(deftest insert-a-basic-document-without-id-and-with-explicit-write-concern
|
||||||
(let [collection "people"
|
(let [collection "people"
|
||||||
docs [{:name "Joe" :age 30} {:name "Paul" :age 27}]]
|
doc {:name "Joe" :age 30}]
|
||||||
(dotimes [n 44]
|
(is (mc/insert db collection doc WriteConcern/SAFE))
|
||||||
(is (monger.result/ok? (mc/insert-batch monger.core/*mongodb-database* "people" docs WriteConcern/NORMAL))))
|
(is (= 1 (mc/count db collection)))))
|
||||||
(is (= 88 (mc/count collection)))))
|
|
||||||
|
(deftest insert-a-basic-db-object-without-id-and-with-default-write-concern
|
||||||
|
(let [collection "people"
|
||||||
|
doc (to-db-object {:name "Joe" :age 30})]
|
||||||
|
(is (nil? (.get ^DBObject doc "_id")))
|
||||||
|
(mc/insert db collection doc)
|
||||||
|
(is (not (nil? (monger.util/get-id doc))))))
|
||||||
|
|
||||||
|
(deftest insert-a-map-with-id-and-with-default-write-concern
|
||||||
|
(let [collection "people"
|
||||||
|
id (ObjectId.)
|
||||||
|
doc {:name "Joe" :age 30 "_id" id}
|
||||||
|
result (mc/insert db collection doc)]
|
||||||
|
(is (= id (monger.util/get-id doc)))))
|
||||||
|
|
||||||
|
(deftest insert-a-document-with-clojure-ratio-in-it
|
||||||
|
(let [collection "widgets"
|
||||||
|
id (ObjectId.)
|
||||||
|
doc {:ratio 11/2 "_id" id}
|
||||||
|
result (mc/insert db collection doc)]
|
||||||
|
(is (= 5.5 (:ratio (mc/find-map-by-id db collection id))))))
|
||||||
|
|
||||||
|
(deftest insert-a-document-with-clojure-keyword-in-it
|
||||||
|
(let [collection "widgets"
|
||||||
|
id (ObjectId.)
|
||||||
|
doc {:keyword :kwd "_id" id}
|
||||||
|
result (mc/insert db collection doc)]
|
||||||
|
(is (= (name :kwd) (:keyword (mc/find-map-by-id db collection id))))))
|
||||||
|
|
||||||
|
(deftest insert-a-document-with-clojure-keyword-in-a-set-in-it
|
||||||
|
(let [collection "widgets"
|
||||||
|
id (ObjectId.)
|
||||||
|
doc {:keyword1 {:keyword2 #{:kw1 :kw2}} "_id" id}
|
||||||
|
result (mc/insert db collection doc)]
|
||||||
|
(is (= (sort ["kw1" "kw2"])
|
||||||
|
(sort (get-in (mc/find-map-by-id db collection id) [:keyword1 :keyword2]))))))
|
||||||
|
|
||||||
|
(deftest insert-a-document-with-clojure-record-in-it
|
||||||
|
(let [collection "widgets"
|
||||||
|
id (ObjectId.)
|
||||||
|
doc {:record (Metrics. 10 20) "_id" id}
|
||||||
|
result (mc/insert db collection doc)]
|
||||||
|
(is (= {:rps 10 :eps 20} (:record (mc/find-map-by-id db collection id))))))
|
||||||
|
|
||||||
|
;; TODO: disabled until we figure out how to implement dereferencing of DBRefs
|
||||||
|
;; in 3.0 in a compatible way (and if that's possible at all). MK.
|
||||||
|
#_ (deftest test-insert-a-document-with-dbref
|
||||||
|
(mc/remove db "widgets")
|
||||||
|
(mc/remove db "owners")
|
||||||
|
(let [coll1 "widgets"
|
||||||
|
coll2 "owners"
|
||||||
|
oid (ObjectId.)
|
||||||
|
joe (mc/insert db coll2 {:name "Joe" :_id oid})
|
||||||
|
dbref (DBRef. coll2 oid)]
|
||||||
|
(mc/insert db coll1 {:type "pentagon" :owner dbref})
|
||||||
|
(let [fetched (mc/find-one-as-map db coll1 {:type "pentagon"})
|
||||||
|
fo (:owner fetched)]
|
||||||
|
(is (= {:_id oid :name "Joe"} (from-db-object @fo true))))))
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; insert-and-return
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest insert-and-return-a-basic-document-without-id-and-with-default-write-concern
|
||||||
|
(let [collection "people"
|
||||||
|
doc {:name "Joe" :age 30}
|
||||||
|
result (mc/insert-and-return db collection doc)]
|
||||||
|
(is (= (:name doc)
|
||||||
|
(:name result)))
|
||||||
|
(is (= (:age doc)
|
||||||
|
(:age result)))
|
||||||
|
(is (:_id result))
|
||||||
|
(is (= 1 (mc/count db collection)))))
|
||||||
|
|
||||||
|
(deftest insert-and-return-a-basic-document-without-id-but-with-a-write-concern
|
||||||
|
(let [collection "people"
|
||||||
|
doc {:name "Joe" :age 30 :ratio 3/4}
|
||||||
|
result (mc/insert-and-return db collection doc WriteConcern/FSYNC_SAFE)]
|
||||||
|
(is (= (:name doc)
|
||||||
|
(:name result)))
|
||||||
|
(is (= (:age doc)
|
||||||
|
(:age result)))
|
||||||
|
(is (= (:ratio doc)
|
||||||
|
(:ratio result)))
|
||||||
|
(is (:_id result))
|
||||||
|
(is (= 1 (mc/count db collection)))))
|
||||||
|
|
||||||
|
(deftest insert-and-return-with-a-provided-id
|
||||||
|
(let [collection "people"
|
||||||
|
oid (ObjectId.)
|
||||||
|
doc {:name "Joe" :age 30 :_id oid}
|
||||||
|
result (mc/insert-and-return db collection doc)]
|
||||||
|
(is (= (:_id result) (:_id doc) oid))
|
||||||
|
(is (= 1 (mc/count db collection)))))
|
||||||
|
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; insert-batch
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest insert-a-batch-of-basic-documents-without-ids-and-with-default-write-concern
|
||||||
|
(let [collection "people"
|
||||||
|
docs [{:name "Joe" :age 30} {:name "Paul" :age 27}]]
|
||||||
|
(is (mc/insert-batch db collection docs))
|
||||||
|
(is (= 2 (mc/count db collection)))))
|
||||||
|
|
||||||
|
(deftest insert-a-batch-of-basic-documents-without-ids-and-with-explicit-write-concern
|
||||||
|
(let [collection "people"
|
||||||
|
docs [{:name "Joe" :age 30} {:name "Paul" :age 27}]]
|
||||||
|
(is (mc/insert-batch db collection docs WriteConcern/FSYNCED))
|
||||||
|
(is (= 2 (mc/count db collection)))))
|
||||||
|
|
||||||
|
(deftest insert-a-batch-of-basic-documents-with-explicit-database-without-ids-and-with-explicit-write-concern
|
||||||
|
(let [collection "people"
|
||||||
|
docs [{:name "Joe" :age 30} {:name "Paul" :age 27}]]
|
||||||
|
(dotimes [n 44]
|
||||||
|
(is (mc/insert-batch db collection docs WriteConcern/FSYNCED)))
|
||||||
|
(is (= 88 (mc/count db collection)))))
|
||||||
|
|
||||||
|
(deftest insert-a-batch-of-basic-documents-from-a-lazy-sequence
|
||||||
|
(let [collection "people"
|
||||||
|
numbers (range 0 1000)]
|
||||||
|
(is (mc/insert-batch db collection (map (fn [^long l]
|
||||||
|
{:n l})
|
||||||
|
numbers)))
|
||||||
|
(is (= (count numbers) (mc/count db collection))))))
|
||||||
|
|
|
||||||
|
|
@ -1,45 +0,0 @@
|
||||||
(ns monger.test.internal.fn-test
|
|
||||||
(:use clojure.test
|
|
||||||
monger.internal.fn))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-expand-all
|
|
||||||
(are [i o] (is (= (expand-all i) o))
|
|
||||||
{ :int (fn [] 1) :str "Clojure" :float (Float/valueOf 11.0) } { :int 1 :str "Clojure" :float (Float/valueOf 11.0 )}
|
|
||||||
{ :long (fn [] (Long/valueOf 11)) } { :long (Long/valueOf 11) }
|
|
||||||
{
|
|
||||||
:i 1
|
|
||||||
:l (Long/valueOf 1111)
|
|
||||||
:s "Clojure"
|
|
||||||
:d (Double/valueOf 11.1)
|
|
||||||
:f (Float/valueOf 2.5)
|
|
||||||
:v [1 2 3]
|
|
||||||
:dyn-v [(fn [] 10) (fn [] 20) (fn [] 30)]
|
|
||||||
:dyn-i (fn [] 1)
|
|
||||||
:dyn-s (fn [] "Clojure (expanded)")
|
|
||||||
:m { :nested "String" }
|
|
||||||
:dyn-m { :abc (fn [] :abc) :nested { :a { :b { :c (fn [] "d") } } } }
|
|
||||||
}
|
|
||||||
{
|
|
||||||
:i 1
|
|
||||||
:l (Long/valueOf 1111)
|
|
||||||
:s "Clojure"
|
|
||||||
:d (Double/valueOf 11.1)
|
|
||||||
:f (Float/valueOf 2.5)
|
|
||||||
:v [1 2 3]
|
|
||||||
:dyn-v [10 20 30]
|
|
||||||
:dyn-i 1
|
|
||||||
:dyn-s "Clojure (expanded)"
|
|
||||||
:m { :nested "String" }
|
|
||||||
:dyn-m {
|
|
||||||
:abc :abc
|
|
||||||
:nested { :a { :b { :c "d" } } }
|
|
||||||
}
|
|
||||||
}))
|
|
||||||
|
|
||||||
(deftest test-expand-all-with
|
|
||||||
(let [expander-fn (fn [f]
|
|
||||||
(* 3 (f)))]
|
|
||||||
(are [i o] (is (= (expand-all-with i expander-fn) o))
|
|
||||||
{ :a 1 :int (fn [] 3) } { :a 1 :int 9 }
|
|
||||||
{ :v [(fn [] 1) (fn [] 11)] :m { :inner (fn [] 3) } :s "Clojure" } { :v [3 33] :m { :inner 9 } :s "Clojure" })))
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
(ns monger.test.internal.pagination-test
|
(ns monger.test.internal.pagination-test
|
||||||
(:use clojure.test
|
(:require [clojure.test :refer :all]
|
||||||
monger.internal.pagination))
|
[monger.internal.pagination :refer :all]))
|
||||||
|
|
||||||
(deftest test-pagination-offset
|
(deftest test-pagination-offset
|
||||||
(are [a b] (= a b)
|
(are [a b] (= a b)
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,6 @@
|
||||||
(ns monger.test.js-test
|
(ns monger.test.js-test
|
||||||
(:require monger.js
|
(:require monger.js
|
||||||
[monger.test.helper :as helper])
|
[clojure.test :refer :all]))
|
||||||
(:use clojure.test))
|
|
||||||
|
|
||||||
(helper/connect!)
|
|
||||||
|
|
||||||
(deftest load-js-resource-using-path-on-the-classpath
|
(deftest load-js-resource-using-path-on-the-classpath
|
||||||
(are [c path] (= c (count (monger.js/load-resource path)))
|
(are [c path] (= c (count (monger.js/load-resource path)))
|
||||||
|
|
|
||||||
16
test/monger/test/json_cheshire_test.clj
Normal file
16
test/monger/test/json_cheshire_test.clj
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
(ns monger.test.json-cheshire-test
|
||||||
|
(:require [clojure.test :refer :all]
|
||||||
|
[monger.json]
|
||||||
|
[cheshire.core :refer :all])
|
||||||
|
(:import org.bson.types.ObjectId
|
||||||
|
org.bson.types.BSONTimestamp))
|
||||||
|
|
||||||
|
(deftest convert-dbobject-to-json
|
||||||
|
(let [input (ObjectId.)
|
||||||
|
output (generate-string input)]
|
||||||
|
(is (= (str "\"" input "\"") output))))
|
||||||
|
|
||||||
|
(deftest convert-bson-timestamp-to-json
|
||||||
|
(let [input (BSONTimestamp. 123 4)
|
||||||
|
output (generate-string input)]
|
||||||
|
(is (= "{\"time\":123,\"inc\":4}" output))))
|
||||||
16
test/monger/test/json_test.clj
Normal file
16
test/monger/test/json_test.clj
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
(ns monger.test.json-test
|
||||||
|
(:require [clojure.test :refer :all]
|
||||||
|
[monger.json]
|
||||||
|
[clojure.data.json :as json])
|
||||||
|
(:import org.bson.types.ObjectId
|
||||||
|
org.bson.types.BSONTimestamp))
|
||||||
|
|
||||||
|
(deftest convert-dbobject-to-json
|
||||||
|
(let [input (ObjectId.)
|
||||||
|
output (json/write-str input)]
|
||||||
|
(is (= (str "\"" input "\"") output))))
|
||||||
|
|
||||||
|
(deftest convert-bson-timestamp-to-json
|
||||||
|
(let [input (BSONTimestamp. 123 4)
|
||||||
|
output (json/write-str input)]
|
||||||
|
(is (= "{\"time\":123,\"inc\":4}" output))))
|
||||||
|
|
@ -1,36 +1,55 @@
|
||||||
(ns monger.test.lib-integration-test
|
(ns monger.test.lib-integration-test
|
||||||
(:use clojure.test
|
(:import [org.joda.time DateTime DateMidnight LocalDate]
|
||||||
monger.json
|
|
||||||
monger.joda-time
|
|
||||||
monger.conversion)
|
|
||||||
(:import [org.joda.time DateTime DateMidnight]
|
|
||||||
org.bson.types.ObjectId
|
org.bson.types.ObjectId
|
||||||
com.mongodb.DBObject)
|
com.mongodb.DBObject)
|
||||||
(:require [clojure.data.json :as json]
|
(:require monger.json
|
||||||
[clj-time.core :as t]))
|
monger.joda-time
|
||||||
|
[clj-time.core :as t]
|
||||||
|
[cheshire.core :as json]
|
||||||
|
[clojure.test :refer :all]
|
||||||
|
[monger.conversion :refer :all]))
|
||||||
|
|
||||||
|
|
||||||
(deftest serialization-of-joda-datetime-to-json-with-clojure-data-json
|
(deftest ^{:integration true} serialization-of-joda-datetime-to-json
|
||||||
(is (= "\"2011-10-13T23:55:00.000Z\"" (json/json-str (t/date-time 2011 10 13 23 55 0)))))
|
(let [dt (t/date-time 2011 10 13 23 55 0)]
|
||||||
|
(is (= "\"2011-10-13T23:55:00.000Z\""
|
||||||
|
(json/encode dt)))))
|
||||||
|
|
||||||
(deftest serialization-of-object-id-to-json-with-clojure-data-json
|
(deftest ^{:integration true} serialization-of-joda-date-to-json
|
||||||
(is (= "\"4ec2d1a6b55634a935ea4ac8\"" (json/json-str (ObjectId. "4ec2d1a6b55634a935ea4ac8")))))
|
(let [d (.toDate (t/date-time 2011 10 13 23 55 0))]
|
||||||
|
(is (= "\"2011-10-13T23:55:00Z\""
|
||||||
|
(json/encode d)))))
|
||||||
|
|
||||||
|
(deftest ^{:integration true} conversion-of-joda-datetime-to-db-object
|
||||||
(deftest conversion-of-joda-datetime-to-db-object
|
|
||||||
(let [d (to-db-object (t/date-time 2011 10 13 23 55 0))]
|
(let [d (to-db-object (t/date-time 2011 10 13 23 55 0))]
|
||||||
(is (instance? java.util.Date d))
|
(is (instance? java.util.Date d))
|
||||||
(is (= 1318550100000 (.getTime ^java.util.Date d)))))
|
(is (= 1318550100000 (.getTime ^java.util.Date d)))))
|
||||||
|
|
||||||
|
|
||||||
(deftest conversion-of-joda-datemidnight-to-db-object
|
(deftest ^{:integration true} conversion-of-joda-datemidnight-to-db-object
|
||||||
(let [d (to-db-object (DateMidnight. (t/date-time 2011 10 13)))]
|
(let [d (to-db-object (DateMidnight. (t/date-time 2011 10 13)))]
|
||||||
(is (instance? java.util.Date d))
|
(is (instance? java.util.Date d))
|
||||||
(is (= 1318464000000 (.getTime ^java.util.Date d)))))
|
(is (= 1318464000000 (.getTime ^java.util.Date d)))))
|
||||||
|
|
||||||
|
(deftest ^{:integration true} conversion-of-joda-localdate-to-db-object
|
||||||
|
(let [d (to-db-object (LocalDate. 2011 10 13))]
|
||||||
|
(is (instance? java.util.Date d))
|
||||||
|
(is (= 111 (.getYear ^java.util.Date d))) ;; how many years since 1900
|
||||||
|
(is (= 9 (.getMonth ^java.util.Date d))) ;; java.util.Date counts from 0
|
||||||
|
(is (= 13 (.getDate ^java.util.Date d)))))
|
||||||
|
|
||||||
(deftest conversion-of-java-util-date-to-joda-datetime
|
(deftest ^{:integration true} conversion-of-java-util-date-to-joda-datetime
|
||||||
(let [input (.toDate ^DateTime (t/date-time 2011 10 13 23 55 0))
|
(let [input (.toDate ^DateTime (t/date-time 2011 10 13 23 55 0))
|
||||||
output (from-db-object input false)]
|
output (from-db-object input false)]
|
||||||
(is (instance? org.joda.time.DateTime output))
|
(is (instance? org.joda.time.DateTime output))
|
||||||
(is (= input (.toDate ^DateTime output)))))
|
(is (= input (.toDate ^DateTime output)))))
|
||||||
|
|
||||||
|
(deftest ^{:integration true} test-reader-extensions
|
||||||
|
(let [^DateTime d (t/date-time 2011 10 13 23 55 0)]
|
||||||
|
(binding [*print-dup* true]
|
||||||
|
(pr-str d))))
|
||||||
|
|
||||||
|
(deftest ^{:integration true} test-reader-extensions-for-localdate
|
||||||
|
(let [^DateTime d (t/today)]
|
||||||
|
(binding [*print-dup* true]
|
||||||
|
(pr-str d))))
|
||||||
|
|
@ -1,69 +0,0 @@
|
||||||
(ns monger.test.map-reduce-test
|
|
||||||
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject CommandResult$CommandFailure MapReduceOutput MapReduceCommand MapReduceCommand$OutputType]
|
|
||||||
org.bson.types.ObjectId
|
|
||||||
java.util.Date)
|
|
||||||
(:require [monger core util]
|
|
||||||
[monger.collection :as mc]
|
|
||||||
[monger.result :as mgres]
|
|
||||||
[clojurewerkz.support.js :as js]
|
|
||||||
[monger.test.helper :as helper])
|
|
||||||
(:use clojure.test
|
|
||||||
[monger operators conversion]
|
|
||||||
monger.test.fixtures))
|
|
||||||
|
|
||||||
(helper/connect!)
|
|
||||||
|
|
||||||
(use-fixtures :each purge-people purge-docs purge-things purge-libraries)
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
|
||||||
;; Map/Reduce
|
|
||||||
;;
|
|
||||||
|
|
||||||
(let [collection "widgets"
|
|
||||||
mapper (js/load-resource "resources/mongo/js/mapfun1.js")
|
|
||||||
reducer "function(key, values) {
|
|
||||||
var result = 0;
|
|
||||||
values.forEach(function(v) { result += v });
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}"
|
|
||||||
batch [{ :state "CA" :quantity 1 :price 199.00 }
|
|
||||||
{ :state "NY" :quantity 2 :price 199.00 }
|
|
||||||
{ :state "NY" :quantity 1 :price 299.00 }
|
|
||||||
{ :state "IL" :quantity 2 :price 11.50 }
|
|
||||||
{ :state "CA" :quantity 2 :price 2.95 }
|
|
||||||
{ :state "IL" :quantity 3 :price 5.50 }]
|
|
||||||
expected [{:_id "CA", :value 204.9} {:_id "IL", :value 39.5} {:_id "NY", :value 697.0}]]
|
|
||||||
(deftest test-basic-inline-map-reduce-example
|
|
||||||
(mc/remove monger.core/*mongodb-database* collection {})
|
|
||||||
(is (mgres/ok? (mc/insert-batch collection batch)))
|
|
||||||
(let [output (mc/map-reduce collection mapper reducer nil MapReduceCommand$OutputType/INLINE {})
|
|
||||||
results (from-db-object ^DBObject (.results ^MapReduceOutput output) true)]
|
|
||||||
(mgres/ok? output)
|
|
||||||
(is (= expected results))))
|
|
||||||
|
|
||||||
(deftest test-basic-map-reduce-example-that-replaces-named-collection
|
|
||||||
(mc/remove monger.core/*mongodb-database* collection {})
|
|
||||||
(is (mgres/ok? (mc/insert-batch collection batch)))
|
|
||||||
(let [output (mc/map-reduce collection mapper reducer "mr_outputs" {})
|
|
||||||
results (from-db-object ^DBObject (.results ^MapReduceOutput output) true)]
|
|
||||||
(mgres/ok? output)
|
|
||||||
(is (= 3 (monger.core/count results)))
|
|
||||||
(is (= expected
|
|
||||||
(map #(from-db-object % true) (seq results))))
|
|
||||||
(is (= expected
|
|
||||||
(map #(from-db-object % true) (mc/find "mr_outputs"))))
|
|
||||||
(.drop ^MapReduceOutput output)))
|
|
||||||
|
|
||||||
(deftest test-basic-map-reduce-example-that-merged-results-into-named-collection
|
|
||||||
(mc/remove monger.core/*mongodb-database* collection {})
|
|
||||||
(is (mgres/ok? (mc/insert-batch collection batch)))
|
|
||||||
(mc/map-reduce collection mapper reducer "merged_mr_outputs" MapReduceCommand$OutputType/MERGE {})
|
|
||||||
(is (mgres/ok? (mc/insert collection { :state "OR" :price 17.95 :quantity 4 })))
|
|
||||||
(let [^MapReduceOutput output (mc/map-reduce collection mapper reducer "merged_mr_outputs" MapReduceCommand$OutputType/MERGE {})]
|
|
||||||
(mgres/ok? output)
|
|
||||||
(is (= 4 (monger.core/count output)))
|
|
||||||
(is (= ["CA" "IL" "NY" "OR"]
|
|
||||||
(map :_id (mc/find-maps "merged_mr_outputs"))))
|
|
||||||
(.drop ^MapReduceOutput output))))
|
|
||||||
|
|
@ -1,111 +1,146 @@
|
||||||
(set! *warn-on-reflection* true)
|
|
||||||
|
|
||||||
(ns monger.test.query-operators-test
|
(ns monger.test.query-operators-test
|
||||||
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject CommandResult$CommandFailure MapReduceOutput MapReduceCommand MapReduceCommand$OutputType]
|
(:require [monger.core :as mg]
|
||||||
org.bson.types.ObjectId
|
[monger.collection :as mc]
|
||||||
java.util.Date)
|
|
||||||
(:require [monger core util]
|
|
||||||
[clojure stacktrace]
|
|
||||||
[monger.collection :as mgcol]
|
|
||||||
[monger.result :as mgres]
|
|
||||||
[monger.conversion :as mgcnv]
|
|
||||||
[monger.js :as js]
|
[monger.js :as js]
|
||||||
[monger.test.helper :as helper])
|
[clojure.test :refer :all]
|
||||||
(:use [clojure.test]
|
[clojure.set :refer [difference]]
|
||||||
[monger.operators]
|
[monger.operators :refer :all])
|
||||||
[monger.test.fixtures]))
|
(:import [com.mongodb QueryOperators]))
|
||||||
|
|
||||||
(monger.core/connect!)
|
;; (use-fixtures :each purge-people purge-docs purge-things purge-libraries)
|
||||||
(monger.core/set-db! (monger.core/get-db "monger-test"))
|
|
||||||
|
|
||||||
(use-fixtures :each purge-people purge-docs purge-things purge-libraries)
|
(deftest every-query-operator-is-defined
|
||||||
|
(let [driver-query-operators (->> (.getDeclaredFields QueryOperators) (map #(.get % nil)) set)
|
||||||
|
monger-query-operators (->> (ns-publics 'monger.operators) (map (comp name first)) set)
|
||||||
|
; $within is deprecated and replaced by $geoWithin since v2.4.
|
||||||
|
; $uniqueDocs is deprecated since v2.6.
|
||||||
|
deprecated-query-operators #{"$within" "$uniqueDocs"}
|
||||||
|
; Query modifier operators that are deprecated in the mongo shell since v3.2
|
||||||
|
deprecated-meta-operators #{"$comment" "$explain" "$hint" "$maxScan"
|
||||||
|
"$maxTimeMS" "$max" "$min" "$orderby"
|
||||||
|
"$returnKey" "$showDiskLoc" "$snapshot" "$query"}
|
||||||
|
undefined-non-deprecated-operators (difference driver-query-operators
|
||||||
|
deprecated-query-operators
|
||||||
|
deprecated-meta-operators
|
||||||
|
monger-query-operators)]
|
||||||
|
(is (= #{} undefined-non-deprecated-operators))))
|
||||||
|
|
||||||
;;
|
(let [conn (mg/connect)
|
||||||
;; $gt, $gte, $lt, lte
|
db (mg/get-db conn "monger-test")]
|
||||||
;;
|
(defn purge-collections
|
||||||
|
[f]
|
||||||
|
(mc/remove db "people")
|
||||||
|
(mc/remove db "libraries")
|
||||||
|
(f)
|
||||||
|
(mc/remove db "people")
|
||||||
|
(mc/remove db "libraries"))
|
||||||
|
|
||||||
(deftest find-with-conditional-operators-comparison
|
(use-fixtures :each purge-collections)
|
||||||
(let [collection "libraries"]
|
|
||||||
(mgcol/insert-batch collection [{:language "Clojure" :name "monger" :users 1}
|
|
||||||
{:language "Clojure" :name "langohr" :users 5}
|
|
||||||
{:language "Clojure" :name "incanter" :users 15}
|
|
||||||
{:language "Scala" :name "akka" :users 150}])
|
|
||||||
(are [a b] (= a (.count (mgcol/find collection b)))
|
|
||||||
2 {:users {$gt 10}}
|
|
||||||
3 {:users {$gte 5}}
|
|
||||||
2 {:users {$lt 10}}
|
|
||||||
2 {:users {$lte 5}}
|
|
||||||
1 {:users {$gt 10 $lt 150}})))
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; $ne
|
;; $gt, $gte, $lt, lte
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(deftest find-with-and-or-operators
|
(deftest find-with-conditional-operators-comparison
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(mgcol/insert-batch collection [{:language "Ruby" :name "mongoid" :users 1}
|
(mc/insert-batch db collection [{:language "Clojure" :name "monger" :users 1}
|
||||||
{:language "Clojure" :name "langohr" :users 5}
|
{:language "Clojure" :name "langohr" :users 5}
|
||||||
{:language "Clojure" :name "incanter" :users 15}
|
{:language "Clojure" :name "incanter" :users 15}
|
||||||
{:language "Scala" :name "akka" :users 150}])
|
{:language "Scala" :name "akka" :users 150}])
|
||||||
(is (= 2 (.count (mgcol/find collection {$ne {:language "Clojure"}}))))))
|
(are [a b] (= a (.count (mc/find db collection b)))
|
||||||
|
2 {:users {$gt 10}}
|
||||||
|
3 {:users {$gte 5}}
|
||||||
|
2 {:users {$lt 10}}
|
||||||
|
2 {:users {$lte 5}}
|
||||||
|
1 {:users {$gt 10 $lt 150}})))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; $eq
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest find-with-eq-operator
|
||||||
|
(let [collection "libraries"]
|
||||||
|
(mc/insert-batch db collection [{:language "Ruby" :name "mongoid" :users 1 :displayName nil}
|
||||||
|
{:language "Clojure" :name "langohr" :users 5}
|
||||||
|
{:language "Clojure" :name "incanter" :users 15}
|
||||||
|
{:language "Scala" :name "akka" :users 150}])
|
||||||
|
(is (= 2 (.count (mc/find db collection {:language {$eq "Clojure"}}))))))
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; $ne
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest find-with-ne-operator
|
||||||
|
(let [collection "libraries"]
|
||||||
|
(mc/insert-batch db collection [{:language "Ruby" :name "mongoid" :users 1}
|
||||||
|
{:language "Clojure" :name "langohr" :users 5}
|
||||||
|
{:language "Clojure" :name "incanter" :users 15}
|
||||||
|
{:language "Scala" :name "akka" :users 150}])
|
||||||
|
(is (= 2 (.count (mc/find db collection {:language {$ne "Clojure"}}))))))
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; $and, $or, $nor
|
;; $and, $or, $nor
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(deftest find-with-and-or-operators
|
(deftest find-with-and-or-operators
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(mgcol/insert-batch collection [{:language "Ruby" :name "mongoid" :users 1}
|
(mc/insert-batch db collection [{:language "Ruby" :name "mongoid" :users 1}
|
||||||
{:language "Clojure" :name "langohr" :users 5}
|
{:language "Clojure" :name "langohr" :users 5}
|
||||||
{:language "Clojure" :name "incanter" :users 15}
|
{:language "Clojure" :name "incanter" :users 15}
|
||||||
{:language "Scala" :name "akka" :users 150}])
|
{:language "Scala" :name "akka" :users 150}])
|
||||||
(is (= 1 (.count (mgcol/find collection {$and [{:language "Clojure"}
|
(is (= 1 (.count (mc/find db collection {$and [{:language "Clojure"}
|
||||||
{:users {$gt 10}}]}))))
|
{:users {$gt 10}}]}))))
|
||||||
(is (= 3 (.count (mgcol/find collection {$or [{:language "Clojure"}
|
(is (= 3 (.count (mc/find db collection {$or [{:language "Clojure"}
|
||||||
{:users {$gt 10}} ]}))))
|
{:users {$gt 10}} ]}))))
|
||||||
(is (= 1 (.count (mgcol/find collection {$nor [{:language "Clojure"}
|
(is (= 1 (.count (mc/find db collection {$nor [{:language "Clojure"}
|
||||||
{:users {$gt 10}} ]}))))))
|
{:users {$gt 10}} ]}))))))
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; $all, $in, $nin
|
;; $all, $in, $nin
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(deftest find-on-embedded-arrays
|
(deftest find-on-embedded-arrays
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(mgcol/insert-batch collection [{:language "Clojure" :tags [ "functional" ]}
|
(mc/insert-batch db collection [{:language "Clojure" :tags [ "functional" ]}
|
||||||
{:language "Scala" :tags [ "functional" "object-oriented" ]}
|
{:language "Scala" :tags [ "functional" "object-oriented" ]}
|
||||||
{:language "Ruby" :tags [ "object-oriented" "dynamic" ]}])
|
{:language "Ruby" :tags [ "object-oriented" "dynamic" ]}])
|
||||||
|
|
||||||
(is (= "Scala" (:language (first (mgcol/find-maps collection {:tags {$all [ "functional" "object-oriented" ]}} )))))
|
(is (= "Scala" (:language (first (mc/find-maps db collection {:tags {$all [ "functional" "object-oriented" ]}} )))))
|
||||||
(is (= 3 (.count (mgcol/find-maps collection {:tags {$in [ "functional" "object-oriented" ]}} ))))
|
(is (= 3 (.count (mc/find-maps db collection {:tags {$in [ "functional" "object-oriented" ]}} ))))
|
||||||
(is (= 2 (.count (mgcol/find-maps collection {:language {$in [ "Scala" "Ruby" ]}} ))))
|
(is (= 2 (.count (mc/find-maps db collection {:language {$in [ "Scala" "Ruby" ]}} ))))
|
||||||
(is (= 1 (.count (mgcol/find-maps collection {:tags {$nin [ "dynamic" "object-oriented" ]}} ))))
|
(is (= 1 (.count (mc/find-maps db collection {:tags {$nin [ "dynamic" "object-oriented" ]}} ))))
|
||||||
(is (= 3 (.count (mgcol/find-maps collection {:language {$nin [ "C#" ]}} ))))))
|
(is (= 3 (.count (mc/find-maps db collection {:language {$nin [ "C#" ]}} ))))))
|
||||||
|
|
||||||
|
|
||||||
(deftest find-with-conditional-operators-on-embedded-documents
|
(deftest find-with-conditional-operators-on-embedded-documents
|
||||||
(let [collection "people"]
|
(let [collection "people"]
|
||||||
(mgcol/insert-batch collection [{:name "Bob" :comments [{:text "Nice!" :rating 1}
|
(mc/insert-batch db collection [{:name "Bob" :comments [{:text "Nice!" :rating 1}
|
||||||
{:text "Love it" :rating 4}
|
{:text "Love it" :rating 4}
|
||||||
{:text "What?":rating -5} ]}
|
{:text "What?":rating -5} ]}
|
||||||
{:name "Alice" :comments [{:text "Yeah" :rating 2}
|
{:name "Alice" :comments [{:text "Yeah" :rating 2}
|
||||||
{:text "Doh" :rating 1}
|
{:text "Doh" :rating 1}
|
||||||
{:text "Agreed" :rating 3}]}])
|
{:text "Agreed" :rating 3}]}])
|
||||||
(are [a b] (= a (.count (mgcol/find collection b)))
|
(are [a b] (= a (.count (mc/find db collection b)))
|
||||||
1 {:comments {$elemMatch {:text "Nice!" :rating {$gte 1}}}}
|
1 {:comments {$elemMatch {:text "Nice!" :rating {$gte 1}}}}
|
||||||
2 {"comments.rating" 1}
|
2 {"comments.rating" 1}
|
||||||
1 {"comments.rating" {$gt 3}})))
|
1 {"comments.rating" {$gt 3}})))
|
||||||
|
|
||||||
(deftest find-with-regex-operator
|
(deftest find-with-regex-operator
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(mgcol/insert-batch collection [{:language "Ruby" :name "Mongoid" :users 1}
|
(mc/insert-batch db collection [{:language "Ruby" :name "Mongoid" :users 1}
|
||||||
{:language "Clojure" :name "Langohr" :users 5}
|
{:language "Clojure" :name "Langohr" :users 5}
|
||||||
{:language "Clojure" :name "Incanter" :users 15}
|
{:language "Clojure" :name "Incanter" :users 15}
|
||||||
{:language "Scala" :name "Akka" :users 150}])
|
{:language "Scala" :name "Akka" :users 150}])
|
||||||
(are [query results] (is (= results (.count (mgcol/find collection query))))
|
(are [query results] (is (= results (.count (mc/find db collection query))))
|
||||||
{:language {$regex "Clo.*"}} 2
|
{:language {$regex "Clo.*"}} 2
|
||||||
{:language {$regex "clo.*" $options "i"}} 2
|
{:language {$regex "clo.*" $options "i"}} 2
|
||||||
{:name {$regex "aK.*" $options "i"}} 1
|
{:name {$regex "aK.*" $options "i"}} 1
|
||||||
{:language {$regex ".*by"}} 1
|
{:language {$regex ".*by"}} 1
|
||||||
{:language {$regex ".*ala.*"}} 1)))
|
{:language {$regex ".*ala.*"}} 1)))
|
||||||
|
|
||||||
|
(deftest find-with-js-expression
|
||||||
|
(let [collection "people"]
|
||||||
|
(mc/insert-batch db collection [{:name "Bob" :placeOfBirth "New York" :address {:city "New York"}}
|
||||||
|
{:name "Alice" :placeOfBirth "New York" :address {:city "Los Angeles"}}])
|
||||||
|
(is (= 1 (.count (mc/find db collection {$where "this.placeOfBirth === this.address.city"})))))))
|
||||||
|
|
|
||||||
|
|
@ -1,287 +1,325 @@
|
||||||
(set! *warn-on-reflection* true)
|
|
||||||
|
|
||||||
(ns monger.test.querying-test
|
(ns monger.test.querying-test
|
||||||
(:refer-clojure :exclude [select find sort])
|
(:refer-clojure :exclude [select find sort])
|
||||||
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject CommandResult$CommandFailure ReadPreference]
|
(:import [com.mongodb WriteResult WriteConcern DBObject ReadPreference]
|
||||||
org.bson.types.ObjectId
|
org.bson.types.ObjectId
|
||||||
java.util.Date)
|
java.util.Date)
|
||||||
(:require [monger core util]
|
(:require [monger.core :as mg]
|
||||||
[monger.collection :as mgcol]
|
[monger.collection :as mc]
|
||||||
|
monger.joda-time
|
||||||
[monger.result :as mgres]
|
[monger.result :as mgres]
|
||||||
[monger.test.helper :as helper])
|
[clojure.test :refer :all]
|
||||||
(:use clojure.test
|
[monger.conversion :refer :all]
|
||||||
monger.test.fixtures
|
[monger.query :refer :all]
|
||||||
[monger conversion query operators joda-time]
|
[monger.operators :refer :all]
|
||||||
[clj-time.core :only [date-time]]))
|
[clj-time.core :refer [date-time]]))
|
||||||
|
|
||||||
(helper/connect!)
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")]
|
||||||
|
|
||||||
(use-fixtures :each purge-docs purge-things purge-locations)
|
(defn purge-collections
|
||||||
|
[f]
|
||||||
|
(mc/remove db "docs")
|
||||||
|
(mc/remove db "things")
|
||||||
|
(mc/remove db "locations")
|
||||||
|
(mc/remove db "querying_docs")
|
||||||
|
(f)
|
||||||
|
(mc/remove db "docs")
|
||||||
|
(mc/remove db "things")
|
||||||
|
(mc/remove db "locations")
|
||||||
|
(mc/remove db "querying_docs"))
|
||||||
|
|
||||||
|
(use-fixtures :each purge-collections)
|
||||||
|
|
||||||
|
;;
|
||||||
|
;; monger.collection/* finders ("low-level API")
|
||||||
|
;;
|
||||||
|
|
||||||
|
;; by ObjectId
|
||||||
|
|
||||||
|
(deftest query-full-document-by-object-id
|
||||||
|
(let [coll "querying_docs"
|
||||||
|
oid (ObjectId.)
|
||||||
|
doc { :_id oid :title "Introducing Monger" }]
|
||||||
|
(mc/insert db coll doc)
|
||||||
|
(is (= doc (mc/find-map-by-id db coll oid)))
|
||||||
|
(is (= doc (mc/find-one-as-map db coll { :_id oid })))))
|
||||||
|
|
||||||
|
|
||||||
;;
|
;; exact match over string field
|
||||||
;; monger.collection/* finders ("low-level API")
|
|
||||||
;;
|
|
||||||
|
|
||||||
;; by ObjectId
|
(deftest query-full-document-using-exact-matching-over-string-field
|
||||||
|
(let [coll "querying_docs"
|
||||||
(deftest query-full-document-by-object-id
|
doc { :title "monger" :language "Clojure" :_id (ObjectId.) }]
|
||||||
(let [coll "docs"
|
(mc/insert db coll doc)
|
||||||
oid (ObjectId.)
|
(is (= [doc] (mc/find-maps db coll { :title "monger" })))
|
||||||
doc { :_id oid :title "Introducing Monger" }]
|
(is (= doc (from-db-object (first (mc/find db coll { :title "monger" })) true)))))
|
||||||
(mgcol/insert coll doc)
|
|
||||||
(is (= doc (mgcol/find-map-by-id coll oid)))
|
|
||||||
(is (= doc (mgcol/find-one-as-map coll { :_id oid })))))
|
|
||||||
|
|
||||||
|
|
||||||
;; exact match over string field
|
;; exact match over string field with limit
|
||||||
|
|
||||||
(deftest query-full-document-using-exact-matching-over-string-field
|
(deftest query-full-document-using-exact-matching-over-string-with-field-with-limit
|
||||||
(let [coll "docs"
|
(let [coll "querying_docs"
|
||||||
doc { :title "monger" :language "Clojure" :_id (ObjectId.) }]
|
doc1 { :title "monger" :language "Clojure" :_id (ObjectId.) }
|
||||||
(mgcol/insert coll doc)
|
doc2 { :title "langohr" :language "Clojure" :_id (ObjectId.) }
|
||||||
(is (= [doc] (mgcol/find-maps coll { :title "monger" })))
|
doc3 { :title "netty" :language "Java" :_id (ObjectId.) }
|
||||||
(is (= doc (from-db-object (first (mgcol/find coll { :title "monger" })) true)))))
|
_ (mc/insert-batch db coll [doc1 doc2 doc3])
|
||||||
|
result (with-collection db coll
|
||||||
|
(find { :title "monger" })
|
||||||
|
(fields [:title, :language, :_id])
|
||||||
|
(skip 0)
|
||||||
|
(limit 1))]
|
||||||
|
(is (= 1 (count result)))
|
||||||
|
(is (= [doc1] result))))
|
||||||
|
|
||||||
|
|
||||||
;; exact match over string field with limit
|
(deftest query-full-document-using-exact-matching-over-string-field-with-limit-and-offset
|
||||||
|
(let [coll "querying_docs"
|
||||||
|
doc1 { :title "lucene" :language "Java" :_id (ObjectId.) }
|
||||||
|
doc2 { :title "joda-time" :language "Java" :_id (ObjectId.) }
|
||||||
|
doc3 { :title "netty" :language "Java" :_id (ObjectId.) }
|
||||||
|
_ (mc/insert-batch db coll [doc1 doc2 doc3])
|
||||||
|
result (with-collection db coll
|
||||||
|
(find { :language "Java" })
|
||||||
|
(skip 1)
|
||||||
|
(limit 2)
|
||||||
|
(sort { :title 1 }))]
|
||||||
|
(is (= 2 (count result)))
|
||||||
|
(is (= [doc1 doc3] result))))
|
||||||
|
|
||||||
(deftest query-full-document-using-exact-matching-over-string-with-field-with-limit
|
(deftest query-with-sorting-on-multiple-fields
|
||||||
(let [coll "docs"
|
(let [coll "querying_docs"
|
||||||
doc1 { :title "monger" :language "Clojure" :_id (ObjectId.) }
|
doc1 { :a 1 :b 2 :c 3 :text "Whatever" :_id (ObjectId.) }
|
||||||
doc2 { :title "langohr" :language "Clojure" :_id (ObjectId.) }
|
doc2 { :a 1 :b 1 :c 4 :text "Blah " :_id (ObjectId.) }
|
||||||
doc3 { :title "netty" :language "Java" :_id (ObjectId.) }
|
doc3 { :a 10 :b 3 :c 1 :text "Abc" :_id (ObjectId.) }
|
||||||
_ (mgcol/insert-batch coll [doc1 doc2 doc3])
|
doc4 { :a 10 :b 3 :c 3 :text "Abc" :_id (ObjectId.) }
|
||||||
result (with-collection coll
|
_ (mc/insert-batch db coll [doc1 doc2 doc3 doc4])
|
||||||
(find { :title "monger" })
|
result1 (with-collection db coll
|
||||||
(fields [:title, :language, :_id])
|
(find {})
|
||||||
(skip 0)
|
(limit 2)
|
||||||
(limit 1))]
|
(fields [:a :b :c :text])
|
||||||
(is (= 1 (count result)))
|
(sort (sorted-map :a 1 :b 1 :text -1)))
|
||||||
(is (= [doc1] result))))
|
result2 (with-collection db coll
|
||||||
|
(find {})
|
||||||
|
(limit 2)
|
||||||
|
(fields [:a :b :c :text])
|
||||||
|
(sort (array-map :c 1 :text -1)))
|
||||||
|
result3 (with-collection db coll
|
||||||
|
(find {})
|
||||||
|
(limit 2)
|
||||||
|
(fields [:a :b :c :text])
|
||||||
|
(sort (array-map :c -1 :text 1)))]
|
||||||
|
(is (= [doc2 doc1] result1))
|
||||||
|
(is (= [doc3 doc1] result2))
|
||||||
|
(is (= [doc2 doc4] result3))))
|
||||||
|
|
||||||
|
|
||||||
(deftest query-full-document-using-exact-matching-over-string-field-with-limit-and-offset
|
;; < ($lt), <= ($lte), > ($gt), >= ($gte)
|
||||||
(let [coll "docs"
|
|
||||||
doc1 { :title "lucene" :language "Java" :_id (ObjectId.) }
|
(deftest query-using-dsl-and-$lt-operator-with-integers
|
||||||
doc2 { :title "joda-time" :language "Java" :_id (ObjectId.) }
|
(let [coll "querying_docs"
|
||||||
doc3 { :title "netty" :language "Java" :_id (ObjectId.) }
|
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year 2006 }
|
||||||
_ (mgcol/insert-batch coll [doc1 doc2 doc3])
|
doc2 { :language "Java" :_id (ObjectId.) :inception_year 1992 }
|
||||||
result (with-collection coll
|
doc3 { :language "Scala" :_id (ObjectId.) :inception_year 2003 }
|
||||||
(find { :language "Java" })
|
_ (mc/insert-batch db coll [doc1 doc2])
|
||||||
(skip 1)
|
lt-result (with-collection db coll
|
||||||
(limit 2)
|
(find { :inception_year { $lt 2000 } })
|
||||||
(sort { :title 1 }))]
|
(limit 2))]
|
||||||
(is (= 2 (count result)))
|
(is (= [doc2] (vec lt-result)))))
|
||||||
(is (= [doc1 doc3] result))))
|
|
||||||
|
|
||||||
|
|
||||||
;; < ($lt), <= ($lte), > ($gt), >= ($gte)
|
(deftest query-using-dsl-and-$lt-operator-with-dates
|
||||||
|
(let [coll "querying_docs"
|
||||||
|
;; these rely on monger.joda-time being loaded. MK.
|
||||||
|
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year (date-time 2006 1 1) }
|
||||||
|
doc2 { :language "Java" :_id (ObjectId.) :inception_year (date-time 1992 1 2) }
|
||||||
|
doc3 { :language "Scala" :_id (ObjectId.) :inception_year (date-time 2003 3 3) }
|
||||||
|
_ (mc/insert-batch db coll [doc1 doc2])
|
||||||
|
lt-result (with-collection db coll
|
||||||
|
(find { :inception_year { $lt (date-time 2000 1 2) } })
|
||||||
|
(limit 2))]
|
||||||
|
(is (= (map :_id [doc2])
|
||||||
|
(map :_id (vec lt-result))))))
|
||||||
|
|
||||||
(deftest query-using-dsl-and-$lt-operator-with-integers
|
(deftest query-using-both-$lte-and-$gte-operators-with-dates
|
||||||
(let [coll "docs"
|
(let [coll "querying_docs"
|
||||||
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year 2006 }
|
;; these rely on monger.joda-time being loaded. MK.
|
||||||
doc2 { :language "Java" :_id (ObjectId.) :inception_year 1992 }
|
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year (date-time 2006 1 1) }
|
||||||
doc3 { :language "Scala" :_id (ObjectId.) :inception_year 2003 }
|
doc2 { :language "Java" :_id (ObjectId.) :inception_year (date-time 1992 1 2) }
|
||||||
_ (mgcol/insert-batch coll [doc1 doc2])
|
doc3 { :language "Scala" :_id (ObjectId.) :inception_year (date-time 2003 3 3) }
|
||||||
lt-result (with-collection "docs"
|
_ (mc/insert-batch db coll [doc1 doc2 doc3])
|
||||||
(find { :inception_year { $lt 2000 } })
|
lt-result (with-collection db coll
|
||||||
(limit 2))]
|
(find { :inception_year { $gt (date-time 2000 1 2) $lte (date-time 2007 2 2) } })
|
||||||
(is (= [doc2] (vec lt-result)))))
|
(sort { :inception_year 1 }))]
|
||||||
|
(is (= (map :_id [doc3 doc1])
|
||||||
|
(map :_id (vec lt-result))))))
|
||||||
|
|
||||||
|
|
||||||
(deftest query-using-dsl-and-$lt-operator-with-dates
|
(deftest query-using-$gt-$lt-$gte-$lte-operators-as-strings
|
||||||
(let [coll "docs"
|
(let [coll "querying_docs"
|
||||||
;; these rely on monger.joda-time being loaded. MK.
|
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year 2006 }
|
||||||
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year (date-time 2006 1 1) }
|
doc2 { :language "Java" :_id (ObjectId.) :inception_year 1992 }
|
||||||
doc2 { :language "Java" :_id (ObjectId.) :inception_year (date-time 1992 1 2) }
|
doc3 { :language "Scala" :_id (ObjectId.) :inception_year 2003 }
|
||||||
doc3 { :language "Scala" :_id (ObjectId.) :inception_year (date-time 2003 3 3) }
|
_ (mc/insert-batch db coll [doc1 doc2 doc3])]
|
||||||
_ (mgcol/insert-batch coll [doc1 doc2])
|
(are [doc, result]
|
||||||
lt-result (with-collection "docs"
|
(= doc, result)
|
||||||
(find { :inception_year { $lt (date-time 2000 1 2) } })
|
(doc2 (with-collection db coll
|
||||||
(limit 2))]
|
(find { :inception_year { "$lt" 2000 } })))
|
||||||
(is (= (map :_id [doc2])
|
(doc2 (with-collection db coll
|
||||||
(map :_id (vec lt-result))))))
|
(find { :inception_year { "$lte" 1992 } })))
|
||||||
|
(doc1 (with-collection db coll
|
||||||
(deftest query-using-both-$lte-and-$gte-operators-with-dates
|
(find { :inception_year { "$gt" 2002 } })
|
||||||
(let [coll "docs"
|
(limit 1)
|
||||||
;; these rely on monger.joda-time being loaded. MK.
|
(sort { :inception_year -1 })))
|
||||||
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year (date-time 2006 1 1) }
|
(doc1 (with-collection db coll
|
||||||
doc2 { :language "Java" :_id (ObjectId.) :inception_year (date-time 1992 1 2) }
|
(find { :inception_year { "$gte" 2006 } }))))))
|
||||||
doc3 { :language "Scala" :_id (ObjectId.) :inception_year (date-time 2003 3 3) }
|
|
||||||
_ (mgcol/insert-batch coll [doc1 doc2 doc3])
|
|
||||||
lt-result (with-collection "docs"
|
|
||||||
(find { :inception_year { $gt (date-time 2000 1 2) $lte (date-time 2007 2 2) } })
|
|
||||||
(sort { :inception_year 1 }))]
|
|
||||||
(is (= (map :_id [doc3 doc1])
|
|
||||||
(map :_id (vec lt-result))))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest query-using-$gt-$lt-$gte-$lte-operators-as-strings
|
(deftest query-using-$gt-$lt-$gte-$lte-operators-using-dsl-composition
|
||||||
(let [coll "docs"
|
(let [coll "querying_docs"
|
||||||
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year 2006 }
|
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year 2006 }
|
||||||
doc2 { :language "Java" :_id (ObjectId.) :inception_year 1992 }
|
doc2 { :language "Java" :_id (ObjectId.) :inception_year 1992 }
|
||||||
doc3 { :language "Scala" :_id (ObjectId.) :inception_year 2003 }
|
doc3 { :language "Scala" :_id (ObjectId.) :inception_year 2003 }
|
||||||
_ (mgcol/insert-batch coll [doc1 doc2 doc3])]
|
srt (-> {}
|
||||||
(are [doc, result]
|
(limit 1)
|
||||||
(= doc, result)
|
(sort { :inception_year -1 }))
|
||||||
(doc2 (with-collection coll
|
_ (mc/insert-batch db coll [doc1 doc2 doc3])]
|
||||||
(find { :inception_year { "$lt" 2000 } })))
|
(is (= [doc1] (with-collection db coll
|
||||||
(doc2 (with-collection coll
|
(find { :inception_year { "$gt" 2002 } })
|
||||||
(find { :inception_year { "$lte" 1992 } })))
|
(merge srt))))))
|
||||||
(doc1 (with-collection coll
|
|
||||||
(find { :inception_year { "$gt" 2002 } })
|
|
||||||
(limit 1)
|
|
||||||
(sort { :inception_year -1 })))
|
|
||||||
(doc1 (with-collection coll
|
|
||||||
(find { :inception_year { "$gte" 2006 } }))))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest query-using-$gt-$lt-$gte-$lte-operators-using-dsl-composition
|
;; $all
|
||||||
(let [coll "docs"
|
|
||||||
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year 2006 }
|
(deftest query-with-using-$all
|
||||||
doc2 { :language "Java" :_id (ObjectId.) :inception_year 1992 }
|
(let [coll "querying_docs"
|
||||||
doc3 { :language "Scala" :_id (ObjectId.) :inception_year 2003 }
|
doc1 { :_id (ObjectId.) :title "Clojure" :tags ["functional" "homoiconic" "syntax-oriented" "dsls" "concurrency features" "jvm"] }
|
||||||
srt (-> {}
|
doc2 { :_id (ObjectId.) :title "Java" :tags ["object-oriented" "jvm"] }
|
||||||
(limit 1)
|
doc3 { :_id (ObjectId.) :title "Scala" :tags ["functional" "object-oriented" "dsls" "concurrency features" "jvm"] }
|
||||||
(sort { :inception_year -1 }))
|
- (mc/insert-batch db coll [doc1 doc2 doc3])
|
||||||
_ (mgcol/insert-batch coll [doc1 doc2 doc3])]
|
result1 (with-collection db coll
|
||||||
(is (= [doc1] (with-collection coll
|
(find { :tags { "$all" ["functional" "jvm" "homoiconic"] } }))
|
||||||
(find { :inception_year { "$gt" 2002 } })
|
result2 (with-collection db coll
|
||||||
(merge srt))))))
|
(find { :tags { "$all" ["functional" "native" "homoiconic"] } }))
|
||||||
|
result3 (with-collection db coll
|
||||||
|
(find { :tags { "$all" ["functional" "jvm" "dsls"] } })
|
||||||
|
(sort { :title 1 }))]
|
||||||
|
(is (= [doc1] result1))
|
||||||
|
(is (empty? result2))
|
||||||
|
(is (= 2 (count result3)))
|
||||||
|
(is (= doc1 (first result3)))))
|
||||||
|
|
||||||
|
|
||||||
;; $all
|
;; $exists
|
||||||
|
|
||||||
(deftest query-with-using-$all
|
(deftest query-with-find-one-as-map-using-$exists
|
||||||
(let [coll "docs"
|
(let [coll "querying_docs"
|
||||||
doc1 { :_id (ObjectId.) :title "Clojure" :tags ["functional" "homoiconic" "syntax-oriented" "dsls" "concurrency features" "jvm"] }
|
doc1 { :_id (ObjectId.) :published-by "Jill The Blogger" :draft false :title "X announces another Y" }
|
||||||
doc2 { :_id (ObjectId.) :title "Java" :tags ["object-oriented" "jvm"] }
|
doc2 { :_id (ObjectId.) :draft true :title "Z announces a Y competitor" }
|
||||||
doc3 { :_id (ObjectId.) :title "Scala" :tags ["functional" "object-oriented" "dsls" "concurrency features" "jvm"] }
|
_ (mc/insert-batch db coll [doc1 doc2])
|
||||||
- (mgcol/insert-batch coll [doc1 doc2 doc3])
|
result1 (mc/find-one-as-map db coll { :published-by { "$exists" true } })
|
||||||
result1 (with-collection coll
|
result2 (mc/find-one-as-map db coll { :published-by { "$exists" false } })]
|
||||||
(find { :tags { "$all" ["functional" "jvm" "homoiconic"] } }))
|
(is (= doc1 result1))
|
||||||
result2 (with-collection coll
|
(is (= doc2 result2))))
|
||||||
(find { :tags { "$all" ["functional" "native" "homoiconic"] } }))
|
|
||||||
result3 (with-collection coll
|
;; $mod
|
||||||
(find { :tags { "$all" ["functional" "jvm" "dsls"] } })
|
|
||||||
(sort { :title 1 }))]
|
(deftest query-with-find-one-as-map-using-$mod
|
||||||
(is (= [doc1] result1))
|
(let [coll "querying_docs"
|
||||||
(is (empty? result2))
|
doc1 { :_id (ObjectId.) :counter 25 }
|
||||||
(is (= 2 (count result3)))
|
doc2 { :_id (ObjectId.) :counter 32 }
|
||||||
(is (= doc1 (first result3)))))
|
doc3 { :_id (ObjectId.) :counter 63 }
|
||||||
|
_ (mc/insert-batch db coll [doc1 doc2 doc3])
|
||||||
|
result1 (mc/find-one-as-map db coll { :counter { "$mod" [10, 5] } })
|
||||||
|
result2 (mc/find-one-as-map db coll { :counter { "$mod" [10, 2] } })
|
||||||
|
result3 (mc/find-one-as-map db coll { :counter { "$mod" [11, 1] } })]
|
||||||
|
(is (= doc1 result1))
|
||||||
|
(is (= doc2 result2))
|
||||||
|
(is (empty? result3))))
|
||||||
|
|
||||||
|
|
||||||
;; $exists
|
;; $ne
|
||||||
|
|
||||||
(deftest query-with-find-one-as-map-using-$exists
|
(deftest query-with-find-one-as-map-using-$ne
|
||||||
(let [coll "docs"
|
(let [coll "querying_docs"
|
||||||
doc1 { :_id (ObjectId.) :published-by "Jill The Blogger" :draft false :title "X announces another Y" }
|
doc1 { :_id (ObjectId.) :counter 25 }
|
||||||
doc2 { :_id (ObjectId.) :draft true :title "Z announces a Y competitor" }
|
doc2 { :_id (ObjectId.) :counter 32 }
|
||||||
_ (mgcol/insert-batch coll [doc1 doc2])
|
_ (mc/insert-batch db coll [doc1 doc2])
|
||||||
result1 (mgcol/find-one-as-map coll { :published-by { "$exists" true } })
|
result1 (mc/find-one-as-map db coll { :counter { "$ne" 25 } })
|
||||||
result2 (mgcol/find-one-as-map coll { :published-by { "$exists" false } })]
|
result2 (mc/find-one-as-map db coll { :counter { "$ne" 32 } })]
|
||||||
(is (= doc1 result1))
|
(is (= doc2 result1))
|
||||||
(is (= doc2 result2))))
|
(is (= doc1 result2))))
|
||||||
|
|
||||||
;; $mod
|
;;
|
||||||
|
;; monger.query DSL features
|
||||||
|
;;
|
||||||
|
|
||||||
(deftest query-with-find-one-as-map-using-$mod
|
;; pagination
|
||||||
(let [coll "docs"
|
(deftest query-using-pagination-dsl
|
||||||
doc1 { :_id (ObjectId.) :counter 25 }
|
(let [coll "querying_docs"
|
||||||
doc2 { :_id (ObjectId.) :counter 32 }
|
doc1 { :_id (ObjectId.) :title "Clojure" :tags ["functional" "homoiconic" "syntax-oriented" "dsls" "concurrency features" "jvm"] }
|
||||||
doc3 { :_id (ObjectId.) :counter 63 }
|
doc2 { :_id (ObjectId.) :title "Java" :tags ["object-oriented" "jvm"] }
|
||||||
_ (mgcol/insert-batch coll [doc1 doc2 doc3])
|
doc3 { :_id (ObjectId.) :title "Scala" :tags ["functional" "object-oriented" "dsls" "concurrency features" "jvm"] }
|
||||||
result1 (mgcol/find-one-as-map coll { :counter { "$mod" [10, 5] } })
|
doc4 { :_id (ObjectId.) :title "Ruby" :tags ["dynamic" "object-oriented" "dsls" "jvm"] }
|
||||||
result2 (mgcol/find-one-as-map coll { :counter { "$mod" [10, 2] } })
|
doc5 { :_id (ObjectId.) :title "Groovy" :tags ["dynamic" "object-oriented" "dsls" "jvm"] }
|
||||||
result3 (mgcol/find-one-as-map coll { :counter { "$mod" [11, 1] } })]
|
doc6 { :_id (ObjectId.) :title "OCaml" :tags ["functional" "static" "dsls"] }
|
||||||
(is (= doc1 result1))
|
doc7 { :_id (ObjectId.) :title "Haskell" :tags ["functional" "static" "dsls" "concurrency features"] }
|
||||||
(is (= doc2 result2))
|
- (mc/insert-batch db coll [doc1 doc2 doc3 doc4 doc5 doc6 doc7])
|
||||||
(is (empty? result3))))
|
result1 (with-collection db coll
|
||||||
|
(find {})
|
||||||
|
(paginate :page 1 :per-page 3)
|
||||||
|
(sort { :title 1 })
|
||||||
|
(read-preference (ReadPreference/primary))
|
||||||
|
(options com.mongodb.Bytes/QUERYOPTION_NOTIMEOUT))
|
||||||
|
result2 (with-collection db coll
|
||||||
|
(find {})
|
||||||
|
(paginate :page 2 :per-page 3)
|
||||||
|
(sort { :title 1 }))
|
||||||
|
result3 (with-collection db coll
|
||||||
|
(find {})
|
||||||
|
(paginate :page 3 :per-page 3)
|
||||||
|
(sort { :title 1 }))
|
||||||
|
result4 (with-collection db coll
|
||||||
|
(find {})
|
||||||
|
(paginate :page 10 :per-page 3)
|
||||||
|
(sort { :title 1 }))]
|
||||||
|
(is (= [doc1 doc5 doc7] result1))
|
||||||
|
(is (= [doc2 doc6 doc4] result2))
|
||||||
|
(is (= [doc3] result3))
|
||||||
|
(is (empty? result4))))
|
||||||
|
|
||||||
|
|
||||||
;; $ne
|
(deftest combined-querying-dsl-example1
|
||||||
|
(let [coll "querying_docs"
|
||||||
|
ma-doc { :_id (ObjectId.) :name "Massachusetts" :iso "MA" :population 6547629 :joined_in 1788 :capital "Boston" }
|
||||||
|
de-doc { :_id (ObjectId.) :name "Delaware" :iso "DE" :population 897934 :joined_in 1787 :capital "Dover" }
|
||||||
|
ny-doc { :_id (ObjectId.) :name "New York" :iso "NY" :population 19378102 :joined_in 1788 :capital "Albany" }
|
||||||
|
ca-doc { :_id (ObjectId.) :name "California" :iso "CA" :population 37253956 :joined_in 1850 :capital "Sacramento" }
|
||||||
|
tx-doc { :_id (ObjectId.) :name "Texas" :iso "TX" :population 25145561 :joined_in 1845 :capital "Austin" }
|
||||||
|
top3 (partial-query (limit 3))
|
||||||
|
by-population-desc (partial-query (sort { :population -1 }))
|
||||||
|
_ (mc/insert-batch db coll [ma-doc de-doc ny-doc ca-doc tx-doc])
|
||||||
|
result (with-collection db coll
|
||||||
|
(find {})
|
||||||
|
(merge top3)
|
||||||
|
(merge by-population-desc))]
|
||||||
|
(is (= result [ca-doc tx-doc ny-doc]))))
|
||||||
|
|
||||||
(deftest query-with-find-one-as-map-using-$ne
|
(deftest combined-querying-dsl-example2
|
||||||
(let [coll "docs"
|
(let [coll "querying_docs"
|
||||||
doc1 { :_id (ObjectId.) :counter 25 }
|
ma-doc { :_id (ObjectId.) :name "Massachusetts" :iso "MA" :population 6547629 :joined_in 1788 :capital "Boston" }
|
||||||
doc2 { :_id (ObjectId.) :counter 32 }
|
de-doc { :_id (ObjectId.) :name "Delaware" :iso "DE" :population 897934 :joined_in 1787 :capital "Dover" }
|
||||||
_ (mgcol/insert-batch coll [doc1 doc2])
|
ny-doc { :_id (ObjectId.) :name "New York" :iso "NY" :population 19378102 :joined_in 1788 :capital "Albany" }
|
||||||
result1 (mgcol/find-one-as-map coll { :counter { "$ne" 25 } })
|
ca-doc { :_id (ObjectId.) :name "California" :iso "CA" :population 37253956 :joined_in 1850 :capital "Sacramento" }
|
||||||
result2 (mgcol/find-one-as-map coll { :counter { "$ne" 32 } })]
|
tx-doc { :_id (ObjectId.) :name "Texas" :iso "TX" :population 25145561 :joined_in 1845 :capital "Austin" }
|
||||||
(is (= doc2 result1))
|
top3 (partial-query (limit 3))
|
||||||
(is (= doc1 result2))))
|
by-population-desc (partial-query (sort { :population -1 }))
|
||||||
|
_ (mc/insert-batch db coll [ma-doc de-doc ny-doc ca-doc tx-doc])
|
||||||
;;
|
result (with-collection db coll
|
||||||
;; monger.query DSL features
|
(find {})
|
||||||
;;
|
(merge top3)
|
||||||
|
(merge by-population-desc)
|
||||||
;; pagination
|
(keywordize-fields false))]
|
||||||
(deftest query-using-pagination-dsl
|
;; documents have fields as strings,
|
||||||
(let [coll "docs"
|
;; not keywords
|
||||||
doc1 { :_id (ObjectId.) :title "Clojure" :tags ["functional" "homoiconic" "syntax-oriented" "dsls" "concurrency features" "jvm"] }
|
(is (= (map #(% "name") result)
|
||||||
doc2 { :_id (ObjectId.) :title "Java" :tags ["object-oriented" "jvm"] }
|
(map #(% :name) [ca-doc tx-doc ny-doc]))))))
|
||||||
doc3 { :_id (ObjectId.) :title "Scala" :tags ["functional" "object-oriented" "dsls" "concurrency features" "jvm"] }
|
|
||||||
doc4 { :_id (ObjectId.) :title "Ruby" :tags ["dynamic" "object-oriented" "dsls" "jvm"] }
|
|
||||||
doc5 { :_id (ObjectId.) :title "Groovy" :tags ["dynamic" "object-oriented" "dsls" "jvm"] }
|
|
||||||
doc6 { :_id (ObjectId.) :title "OCaml" :tags ["functional" "static" "dsls"] }
|
|
||||||
doc7 { :_id (ObjectId.) :title "Haskell" :tags ["functional" "static" "dsls" "concurrency features"] }
|
|
||||||
- (mgcol/insert-batch coll [doc1 doc2 doc3 doc4 doc5 doc6 doc7])
|
|
||||||
result1 (with-collection coll
|
|
||||||
(find {})
|
|
||||||
(paginate :page 1 :per-page 3)
|
|
||||||
(sort { :title 1 })
|
|
||||||
(read-preference ReadPreference/PRIMARY))
|
|
||||||
result2 (with-collection coll
|
|
||||||
(find {})
|
|
||||||
(paginate :page 2 :per-page 3)
|
|
||||||
(sort { :title 1 }))
|
|
||||||
result3 (with-collection coll
|
|
||||||
(find {})
|
|
||||||
(paginate :page 3 :per-page 3)
|
|
||||||
(sort { :title 1 }))
|
|
||||||
result4 (with-collection coll
|
|
||||||
(find {})
|
|
||||||
(paginate :page 10 :per-page 3)
|
|
||||||
(sort { :title 1 }))]
|
|
||||||
(is (= [doc1 doc5 doc7] result1))
|
|
||||||
(is (= [doc2 doc6 doc4] result2))
|
|
||||||
(is (= [doc3] result3))
|
|
||||||
(is (empty? result4))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest combined-querying-dsl-example1
|
|
||||||
(let [coll "docs"
|
|
||||||
ma-doc { :_id (ObjectId.) :name "Massachusetts" :iso "MA" :population 6547629 :joined_in 1788 :capital "Boston" }
|
|
||||||
de-doc { :_id (ObjectId.) :name "Delaware" :iso "DE" :population 897934 :joined_in 1787 :capital "Dover" }
|
|
||||||
ny-doc { :_id (ObjectId.) :name "New York" :iso "NY" :population 19378102 :joined_in 1788 :capital "Albany" }
|
|
||||||
ca-doc { :_id (ObjectId.) :name "California" :iso "CA" :population 37253956 :joined_in 1850 :capital "Sacramento" }
|
|
||||||
tx-doc { :_id (ObjectId.) :name "Texas" :iso "TX" :population 25145561 :joined_in 1845 :capital "Austin" }
|
|
||||||
top3 (partial-query (limit 3))
|
|
||||||
by-population-desc (partial-query (sort { :population -1 }))
|
|
||||||
_ (mgcol/insert-batch coll [ma-doc de-doc ny-doc ca-doc tx-doc])
|
|
||||||
result (with-collection coll
|
|
||||||
(find {})
|
|
||||||
(merge top3)
|
|
||||||
(merge by-population-desc))]
|
|
||||||
(is (= result [ca-doc tx-doc ny-doc]))))
|
|
||||||
|
|
||||||
(deftest combined-querying-dsl-example2
|
|
||||||
(let [coll "docs"
|
|
||||||
ma-doc { :_id (ObjectId.) :name "Massachusetts" :iso "MA" :population 6547629 :joined_in 1788 :capital "Boston" }
|
|
||||||
de-doc { :_id (ObjectId.) :name "Delaware" :iso "DE" :population 897934 :joined_in 1787 :capital "Dover" }
|
|
||||||
ny-doc { :_id (ObjectId.) :name "New York" :iso "NY" :population 19378102 :joined_in 1788 :capital "Albany" }
|
|
||||||
ca-doc { :_id (ObjectId.) :name "California" :iso "CA" :population 37253956 :joined_in 1850 :capital "Sacramento" }
|
|
||||||
tx-doc { :_id (ObjectId.) :name "Texas" :iso "TX" :population 25145561 :joined_in 1845 :capital "Austin" }
|
|
||||||
top3 (partial-query (limit 3))
|
|
||||||
by-population-desc (partial-query (sort { :population -1 }))
|
|
||||||
_ (mgcol/insert-batch coll [ma-doc de-doc ny-doc ca-doc tx-doc])
|
|
||||||
result (with-collection coll
|
|
||||||
(find {})
|
|
||||||
(merge top3)
|
|
||||||
(merge by-population-desc)
|
|
||||||
(keywordize-fields false))]
|
|
||||||
;; documents have fields as strings,
|
|
||||||
;; not keywords
|
|
||||||
(is (= (map #(% "name") result)
|
|
||||||
(map #(% :name) [ca-doc tx-doc ny-doc])))))
|
|
||||||
|
|
|
||||||
|
|
@ -1,39 +1,55 @@
|
||||||
(set! *warn-on-reflection* true)
|
|
||||||
|
|
||||||
(ns monger.test.ragtime-test
|
(ns monger.test.ragtime-test
|
||||||
(:require [monger.core :as mg]
|
(:require [monger.core :as mg]
|
||||||
[monger.collection :as mc]
|
[monger.collection :as mc]
|
||||||
[monger.test.helper :as helper]
|
monger.ragtime
|
||||||
monger.ragtime)
|
[ragtime.protocols :refer :all]
|
||||||
(:use clojure.test
|
[clojure.test :refer :all]))
|
||||||
[monger.test.fixtures :only [purge-migrations]]
|
|
||||||
ragtime.core))
|
|
||||||
|
|
||||||
|
|
||||||
(helper/connect!)
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")]
|
||||||
|
(defn purge-collections
|
||||||
|
[f]
|
||||||
|
(mc/remove db "meta.migrations")
|
||||||
|
(f)
|
||||||
|
(mc/remove db "meta.migrations"))
|
||||||
|
|
||||||
(use-fixtures :each purge-migrations)
|
(use-fixtures :each purge-collections)
|
||||||
|
|
||||||
|
(when-not (get (System/getenv) "CI")
|
||||||
|
(deftest test-add-migration-id
|
||||||
|
(let [coll "meta.migrations"
|
||||||
|
key "1"]
|
||||||
|
(mc/remove db coll {})
|
||||||
|
(is (not (mc/any? db coll {:_id key})))
|
||||||
|
(is (not (some #{key} (applied-migration-ids db))))
|
||||||
|
(add-migration-id db key)
|
||||||
|
(is (mc/any? db coll {:_id key}))
|
||||||
|
(is (some #{key} (applied-migration-ids db)))))
|
||||||
|
|
||||||
|
|
||||||
(deftest test-add-migration-id
|
(deftest test-remove-migration-id
|
||||||
(let [db (mg/get-db "monger-test")
|
(let [coll "meta.migrations"
|
||||||
coll "meta.migrations"
|
key "1"]
|
||||||
key "1"]
|
(mc/remove db coll {})
|
||||||
(mc/remove db coll {})
|
(add-migration-id db key)
|
||||||
(is (not (mc/any? db coll {:_id key})))
|
(is (mc/any? db coll {:_id key}))
|
||||||
(is (not (contains? (applied-migration-ids db) key)))
|
(is (some #{key} (applied-migration-ids db)))
|
||||||
(add-migration-id db key)
|
(remove-migration-id db key)
|
||||||
(is (mc/any? db coll {:_id key}))
|
(is (not (some #{key} (applied-migration-ids db))))))
|
||||||
(is (contains? (applied-migration-ids db) key))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-remove-migration-id
|
(deftest test-migrations-ordering
|
||||||
(let [db (mg/get-db "monger-test")
|
(let [coll "meta.migrations"
|
||||||
coll "meta.migrations"
|
all-keys [ "9" "4" "7" "1" "5" "3" "6" "2" "8"]]
|
||||||
key "1"]
|
(mc/remove db coll {})
|
||||||
(mc/remove db coll {})
|
|
||||||
(add-migration-id db key)
|
(doseq [key all-keys]
|
||||||
(is (mc/any? db coll {:_id key}))
|
(add-migration-id db key))
|
||||||
(is (contains? (applied-migration-ids db) key))
|
|
||||||
(remove-migration-id db key)
|
(doseq [key all-keys]
|
||||||
(is (not (contains? (applied-migration-ids db) key)))))
|
(is (mc/any? db coll {:_id key}))
|
||||||
|
(is (some #{key} (applied-migration-ids db))))
|
||||||
|
|
||||||
|
(testing "Applied migrations must come out in creation order"
|
||||||
|
(is (= all-keys (applied-migration-ids db))))))))
|
||||||
|
|
|
||||||
|
|
@ -1,276 +1,293 @@
|
||||||
(set! *warn-on-reflection* true)
|
|
||||||
|
|
||||||
(ns monger.test.regular-finders-test
|
(ns monger.test.regular-finders-test
|
||||||
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject]
|
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject]
|
||||||
org.bson.types.ObjectId
|
org.bson.types.ObjectId
|
||||||
java.util.Date)
|
java.util.Date)
|
||||||
(:require [monger core util]
|
(:require [monger.core :as mg]
|
||||||
[monger.collection :as mgcol]
|
[monger.collection :as mc]
|
||||||
|
[monger.util :as mu]
|
||||||
[monger.result :as mgres]
|
[monger.result :as mgres]
|
||||||
[monger.conversion :as mgcnv]
|
[monger.conversion :as mgcnv]
|
||||||
[monger.test.helper :as helper])
|
[clojure.test :refer :all]
|
||||||
(:use clojure.test
|
[monger.operators :refer :all]
|
||||||
monger.operators
|
[monger.conversion :refer [to-db-object]]))
|
||||||
monger.test.fixtures))
|
|
||||||
|
|
||||||
(helper/connect!)
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")]
|
||||||
|
(use-fixtures :each (fn [f]
|
||||||
|
(mc/remove db "people")
|
||||||
|
(mc/remove db "docs")
|
||||||
|
(mc/remove db "regular_finders_docs")
|
||||||
|
(mc/remove db "things")
|
||||||
|
(mc/remove db "libraries")
|
||||||
|
(f)
|
||||||
|
(mc/remove db "people")
|
||||||
|
(mc/remove db "docs")
|
||||||
|
(mc/remove db "regular_finders_docs")
|
||||||
|
(mc/remove db "things")
|
||||||
|
(mc/remove db "libraries")))
|
||||||
|
|
||||||
(use-fixtures :each purge-people purge-docs purge-things purge-libraries)
|
;;
|
||||||
|
;; find-one
|
||||||
|
;;
|
||||||
|
|
||||||
|
(deftest find-one-full-document-when-collection-is-empty
|
||||||
|
(let [collection "regular_finders_docs"]
|
||||||
|
(is (nil? (mc/find-one db collection {})))))
|
||||||
|
|
||||||
|
(deftest find-one-full-document-as-map-when-collection-is-empty
|
||||||
|
(let [collection "regular_finders_docs"]
|
||||||
|
(mc/remove db collection)
|
||||||
|
(is (nil? (mc/find-one-as-map db collection {})))))
|
||||||
|
|
||||||
|
|
||||||
;;
|
(deftest find-one-full-document-when-collection-has-matches
|
||||||
;; find-one
|
(let [collection "regular_finders_docs"
|
||||||
;;
|
doc-id (mu/random-uuid)
|
||||||
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
||||||
|
_ (mc/insert db collection doc)
|
||||||
|
found-one (mc/find-one db collection { :language "Clojure" })]
|
||||||
|
(is found-one)
|
||||||
|
(is (= (:_id doc) (mu/get-id found-one)))
|
||||||
|
(is (= (mgcnv/from-db-object found-one true) doc))
|
||||||
|
(is (= (mgcnv/to-db-object doc) found-one))))
|
||||||
|
|
||||||
(deftest find-one-full-document-when-collection-is-empty
|
(deftest find-one-full-document-as-map-when-collection-has-matches
|
||||||
(let [collection "docs"]
|
(let [collection "regular_finders_docs"
|
||||||
(is (nil? (mgcol/find-one collection {})))))
|
doc-id (mu/random-uuid)
|
||||||
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
||||||
(deftest find-one-full-document-as-map-when-collection-is-empty
|
(mc/insert db collection doc)
|
||||||
(let [collection "docs"]
|
(is (= doc (mc/find-one-as-map db collection { :language "Clojure" })))))
|
||||||
(is (nil? (mgcol/find-one-as-map collection {})))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest find-one-full-document-when-collection-has-matches
|
|
||||||
(let [collection "docs"
|
|
||||||
doc-id (monger.util/random-uuid)
|
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
|
||||||
_ (mgcol/insert collection doc)
|
|
||||||
found-one (mgcol/find-one collection { :language "Clojure" })]
|
|
||||||
(is (= (:_id doc) (monger.util/get-id found-one)))
|
|
||||||
(is (= (mgcnv/from-db-object found-one true) doc))
|
|
||||||
(is (= (mgcnv/to-db-object doc) found-one))))
|
|
||||||
|
|
||||||
(deftest find-one-full-document-as-map-when-collection-has-matches
|
|
||||||
(let [collection "docs"
|
|
||||||
doc-id (monger.util/random-uuid)
|
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
|
||||||
(mgcol/insert collection doc)
|
|
||||||
(is (= doc (mgcol/find-one-as-map collection { :language "Clojure" })))))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(deftest find-one-partial-document-when-collection-has-matches
|
(deftest find-one-partial-document-when-collection-has-matches
|
||||||
(let [collection "docs"
|
(let [collection "regular_finders_docs"
|
||||||
doc-id (monger.util/random-uuid)
|
doc-id (mu/random-uuid)
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
||||||
_ (mgcol/insert collection doc)
|
_ (mc/insert db collection doc)
|
||||||
loaded (mgcol/find-one collection { :language "Clojure" } [:language])]
|
loaded (mc/find-one db collection { :language "Clojure" } [:language])]
|
||||||
(is (nil? (.get ^DBObject loaded "data-store")))
|
(is (nil? (.get ^DBObject loaded "data-store")))
|
||||||
(is (= doc-id (monger.util/get-id loaded)))
|
(is (= doc-id (mu/get-id loaded)))
|
||||||
(is (= "Clojure" (.get ^DBObject loaded "language")))))
|
(is (= "Clojure" (.get ^DBObject loaded "language")))))
|
||||||
|
|
||||||
|
|
||||||
(deftest find-one-partial-document-using-field-negation-when-collection-has-matches
|
(deftest find-one-partial-document-using-field-negation-when-collection-has-matches
|
||||||
(let [collection "docs"
|
(let [collection "regular_finders_docs"
|
||||||
doc-id (monger.util/random-uuid)
|
doc-id (mu/random-uuid)
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
||||||
_ (mgcol/insert collection doc)
|
_ (mc/insert db collection doc)
|
||||||
^DBObject loaded (mgcol/find-one collection { :language "Clojure" } {:data-store 0 :_id 0})]
|
^DBObject loaded (mc/find-one db collection { :language "Clojure" } {:data-store 0 :_id 0})]
|
||||||
(is (nil? (.get loaded "data-store")))
|
(is (nil? (.get loaded "data-store")))
|
||||||
(is (nil? (.get loaded "_id")))
|
(is (nil? (.get loaded "_id")))
|
||||||
(is (nil? (monger.util/get-id loaded)))
|
(is (nil? (mu/get-id loaded)))
|
||||||
(is (= "Clojure" (.get loaded "language")))))
|
(is (= "Clojure" (.get loaded "language")))))
|
||||||
|
|
||||||
|
|
||||||
(deftest find-one-partial-document-as-map-when-collection-has-matches
|
(deftest find-one-partial-document-as-map-when-collection-has-matches
|
||||||
(let [collection "docs"
|
(let [collection "regular_finders_docs"
|
||||||
doc-id (monger.util/random-uuid)
|
doc-id (mu/random-uuid)
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
||||||
(mgcol/insert collection doc)
|
(mc/insert db collection doc)
|
||||||
(is (= { :data-store "MongoDB", :_id doc-id } (mgcol/find-one-as-map collection { :language "Clojure" } [:data-store])))))
|
(is (= { :data-store "MongoDB", :_id doc-id }
|
||||||
|
(mc/find-one-as-map db collection { :language "Clojure" } [:data-store])))))
|
||||||
|
|
||||||
|
|
||||||
(deftest find-one-partial-document-as-map-when-collection-has-matches-with-keywordize
|
(deftest find-one-partial-document-as-map-when-collection-has-matches-with-keywordize
|
||||||
(let [collection "docs"
|
(let [collection "regular_finders_docs"
|
||||||
doc-id (monger.util/random-uuid)
|
doc-id (mu/random-uuid)
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
||||||
fields [:data-store]
|
fields [:data-store]
|
||||||
_id (mgcol/insert collection doc)
|
_id (mc/insert db collection doc)
|
||||||
loaded (mgcol/find-one-as-map collection { :language "Clojure" } fields true)
|
loaded (mc/find-one-as-map db collection { :language "Clojure" } fields true)
|
||||||
]
|
]
|
||||||
(is (= { :data-store "MongoDB", :_id doc-id } loaded ))))
|
(is (= { :data-store "MongoDB", :_id doc-id } loaded ))))
|
||||||
|
|
||||||
|
|
||||||
(deftest find-one-partial-document-as-map-when-collection-has-matches-with-keywordize-false
|
(deftest find-one-partial-document-as-map-when-collection-has-matches-with-keywordize-false
|
||||||
(let [collection "docs"
|
(let [collection "regular_finders_docs"
|
||||||
doc-id (monger.util/random-uuid)
|
doc-id (mu/random-uuid)
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
||||||
fields [:data-store]
|
fields [:data-store]
|
||||||
_id (mgcol/insert collection doc)
|
_id (mc/insert db collection doc)
|
||||||
loaded (mgcol/find-one-as-map collection { :language "Clojure" } fields false)
|
loaded (mc/find-one-as-map db collection { :language "Clojure" } fields false)]
|
||||||
]
|
(is (= { "_id" doc-id, "data-store" "MongoDB" } loaded ))))
|
||||||
(is (= { "_id" doc-id, "data-store" "MongoDB" } loaded ))))
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; find-by-id
|
;; find-by-id
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(deftest find-full-document-by-string-id-when-that-document-does-not-exist
|
(deftest find-full-document-by-string-id-when-that-document-does-not-exist
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id (monger.util/random-uuid)]
|
doc-id (mu/random-uuid)]
|
||||||
(is (nil? (mgcol/find-by-id collection doc-id)))))
|
(is (nil? (mc/find-by-id db collection doc-id)))))
|
||||||
|
|
||||||
(deftest find-full-document-by-string-id-when-id-is-nil
|
(deftest find-full-document-by-string-id-when-id-is-nil
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id nil]
|
doc-id nil]
|
||||||
(is (thrown? IllegalArgumentException (mgcol/find-by-id collection doc-id)))))
|
(is (thrown? IllegalArgumentException (mc/find-by-id db collection doc-id)))))
|
||||||
|
|
||||||
(deftest find-full-document-by-object-id-when-that-document-does-not-exist
|
(deftest find-full-document-by-object-id-when-that-document-does-not-exist
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id (ObjectId.)]
|
doc-id (ObjectId.)]
|
||||||
(is (nil? (mgcol/find-by-id collection doc-id)))))
|
(is (nil? (mc/find-by-id db collection doc-id)))))
|
||||||
|
|
||||||
(deftest find-full-document-by-id-as-map-when-that-document-does-not-exist
|
(deftest find-full-document-by-id-as-map-when-that-document-does-not-exist
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id (monger.util/random-uuid)]
|
doc-id (mu/random-uuid)]
|
||||||
(is (nil? (mgcol/find-map-by-id collection doc-id)))))
|
(is (nil? (mc/find-map-by-id db collection doc-id)))))
|
||||||
|
|
||||||
(deftest find-full-document-by-id-as-map-when-id-is-nil
|
(deftest find-full-document-by-id-as-map-when-id-is-nil
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id nil]
|
doc-id nil]
|
||||||
(is (thrown? IllegalArgumentException
|
(is (thrown? IllegalArgumentException
|
||||||
(mgcol/find-map-by-id collection doc-id)))))
|
(mc/find-map-by-id db collection doc-id)))))
|
||||||
|
|
||||||
|
|
||||||
(deftest find-full-document-by-string-id-when-document-does-exist
|
(deftest find-full-document-by-string-id-when-document-does-exist
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id (monger.util/random-uuid)
|
doc-id (mu/random-uuid)
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
||||||
(mgcol/insert collection doc)
|
(mc/insert db collection doc)
|
||||||
(is (= (doc (mgcol/find-by-id collection doc-id))))))
|
(is (= (to-db-object doc) (mc/find-by-id db collection doc-id)))))
|
||||||
|
|
||||||
(deftest find-full-document-by-object-id-when-document-does-exist
|
(deftest find-full-document-by-object-id-when-document-does-exist
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id (ObjectId.)
|
doc-id (ObjectId.)
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
||||||
(mgcol/insert collection doc)
|
(mc/insert db collection doc)
|
||||||
(is (= (doc (mgcol/find-by-id collection doc-id))))))
|
(is (= (to-db-object doc) (mc/find-by-id db collection doc-id)))))
|
||||||
|
|
||||||
(deftest find-full-document-map-by-string-id-when-document-does-exist
|
(deftest find-full-document-map-by-string-id-when-document-does-exist
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id (monger.util/random-uuid)
|
doc-id (mu/random-uuid)
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
||||||
(mgcol/insert collection doc)
|
(mc/insert db collection doc)
|
||||||
(is (= (doc (mgcol/find-map-by-id collection doc-id))))))
|
(is (= doc (mc/find-map-by-id db collection doc-id)))))
|
||||||
|
|
||||||
(deftest find-full-document-map-by-object-id-when-document-does-exist
|
(deftest find-full-document-map-by-object-id-when-document-does-exist
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id (ObjectId.)
|
doc-id (ObjectId.)
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
||||||
(mgcol/insert collection doc)
|
(mc/insert db collection doc)
|
||||||
(is (= (doc (mgcol/find-map-by-id collection doc-id))))))
|
(is (= doc (mc/find-map-by-id db collection doc-id)))))
|
||||||
|
|
||||||
(deftest find-partial-document-by-id-when-document-does-exist
|
(deftest find-partial-document-by-id-when-document-does-exist
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id (monger.util/random-uuid)
|
doc-id (mu/random-uuid)
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
|
||||||
(mgcol/insert collection doc)
|
(mc/insert db collection doc)
|
||||||
(is (= ({ :language "Clojure" } (mgcol/find-by-id collection doc-id [ :language ]))))))
|
(is (= (to-db-object { :_id doc-id :language "Clojure" })
|
||||||
|
(mc/find-by-id db collection doc-id [ :language ])))))
|
||||||
|
|
||||||
|
|
||||||
(deftest find-partial-document-as-map-by-id-when-document-does-exist
|
(deftest find-partial-document-as-map-by-id-when-document-does-exist
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id (monger.util/random-uuid)
|
doc-id (mu/random-uuid)
|
||||||
fields [:data-store]
|
fields [:data-store]
|
||||||
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
||||||
_ (mgcol/insert collection doc)
|
_ (mc/insert db collection doc)
|
||||||
loaded (mgcol/find-map-by-id collection doc-id [ :language ])]
|
loaded (mc/find-map-by-id db collection doc-id [ :language ])]
|
||||||
(is (= { :language "Clojure", :_id doc-id } loaded ))
|
(is (= { :language "Clojure", :_id doc-id } loaded ))))
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
;;
|
;;
|
||||||
;; find
|
;; find
|
||||||
;;
|
;;
|
||||||
|
|
||||||
(deftest find-full-document-when-collection-is-empty
|
(deftest find-full-document-when-collection-is-empty
|
||||||
(let [collection "docs"
|
(let [collection "regular_finders_docs"
|
||||||
cursor (mgcol/find collection)]
|
cursor (mc/find db collection)]
|
||||||
(is (empty? (iterator-seq cursor)))))
|
(is (empty? (iterator-seq cursor)))))
|
||||||
|
|
||||||
(deftest find-document-seq-when-collection-is-empty
|
(deftest find-document-seq-when-collection-is-empty
|
||||||
(let [collection "docs"]
|
(let [collection "regular_finders_docs"]
|
||||||
(is (empty? (mgcol/find-seq collection)))))
|
(is (empty? (mc/find-seq db collection)))))
|
||||||
|
|
||||||
(deftest find-multiple-documents-when-collection-is-empty
|
(deftest find-multiple-documents-when-collection-is-empty
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(is (empty? (mgcol/find collection { :language "Scala" })))))
|
(is (empty? (mc/find db collection { :language "Scala" })))))
|
||||||
|
|
||||||
(deftest find-multiple-maps-when-collection-is-empty
|
(deftest find-multiple-maps-when-collection-is-empty
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(is (empty? (mgcol/find-maps collection { :language "Scala" })))))
|
(is (empty? (mc/find-maps db collection { :language "Scala" })))))
|
||||||
|
|
||||||
(deftest find-multiple-documents-by-regex
|
(deftest find-multiple-documents-by-regex
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(mgcol/insert-batch collection [{ :language "Clojure", :name "monger" }
|
(mc/insert-batch db collection [{ :language "Clojure", :name "monger" }
|
||||||
{ :language "Java", :name "nhibernate" }
|
{ :language "Java", :name "nhibernate" }
|
||||||
{ :language "JavaScript", :name "sprout-core" }])
|
{ :language "JavaScript", :name "sprout-core" }])
|
||||||
(is (= 2 (monger.core/count (mgcol/find collection { :language #"Java*" }))))))
|
(is (= 2 (monger.core/count (mc/find db collection { :language #"Java*" }))))))
|
||||||
|
|
||||||
(deftest find-multiple-documents
|
(deftest find-multiple-documents
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(mgcol/insert-batch collection [{ :language "Clojure", :name "monger" }
|
(mc/insert-batch db collection [{ :language "Clojure", :name "monger" }
|
||||||
{ :language "Clojure", :name "langohr" }
|
{ :language "Clojure", :name "langohr" }
|
||||||
{ :language "Clojure", :name "incanter" }
|
{ :language "Clojure", :name "incanter" }
|
||||||
{ :language "Scala", :name "akka" }])
|
{ :language "Scala", :name "akka" }])
|
||||||
(is (= 1 (monger.core/count (mgcol/find collection { :language "Scala" }))))
|
(is (= 1 (monger.core/count (mc/find db collection { :language "Scala" }))))
|
||||||
(is (= 3 (.count (mgcol/find collection { :language "Clojure" }))))
|
(is (= 3 (.count (mc/find db collection { :language "Clojure" }))))
|
||||||
(is (empty? (mgcol/find collection { :language "Java" })))))
|
(is (empty? (mc/find db collection { :language "Java" })))))
|
||||||
|
|
||||||
|
|
||||||
(deftest find-document-specify-fields
|
(deftest find-document-specify-fields
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
_ (mgcol/insert collection { :language "Clojure", :name "monger" })
|
_ (mc/insert db collection { :language "Clojure", :name "monger" })
|
||||||
result (mgcol/find collection { :language "Clojure"} [:language])]
|
result (mc/find db collection { :language "Clojure"} [:language])]
|
||||||
(is (= (seq [:_id :language]) (keys (mgcnv/from-db-object (.next result) true))))))
|
(is (= (set [:_id :language]) (-> (mgcnv/from-db-object (.next result) true) keys set)))))
|
||||||
|
|
||||||
(deftest find-and-iterate-over-multiple-documents-the-hard-way
|
(deftest find-and-iterate-over-multiple-documents-the-hard-way
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(mgcol/insert-batch collection [{ :language "Clojure", :name "monger" }
|
(mc/insert-batch db collection [{ :language "Clojure", :name "monger" }
|
||||||
{ :language "Clojure", :name "langohr" }
|
{ :language "Clojure", :name "langohr" }
|
||||||
{ :language "Clojure", :name "incanter" }
|
{ :language "Clojure", :name "incanter" }
|
||||||
{ :language "Scala", :name "akka" }])
|
{ :language "Scala", :name "akka" }])
|
||||||
(doseq [doc (take 3 (map (fn [dbo]
|
(doseq [doc (take 3 (map (fn [dbo]
|
||||||
(mgcnv/from-db-object dbo true))
|
(mgcnv/from-db-object dbo true))
|
||||||
(mgcol/find-seq collection { :language "Clojure" })))]
|
(mc/find-seq db collection { :language "Clojure" })))]
|
||||||
(is (= "Clojure" (:language doc))))))
|
(is (= "Clojure" (:language doc))))))
|
||||||
|
|
||||||
(deftest find-and-iterate-over-multiple-documents
|
(deftest find-and-iterate-over-multiple-documents
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(mgcol/insert-batch collection [{ :language "Clojure", :name "monger" }
|
(mc/insert-batch db collection [{ :language "Clojure", :name "monger" }
|
||||||
{ :language "Clojure", :name "langohr" }
|
{ :language "Clojure", :name "langohr" }
|
||||||
{ :language "Clojure", :name "incanter" }
|
{ :language "Clojure", :name "incanter" }
|
||||||
{ :language "Scala", :name "akka" }])
|
{ :language "Scala", :name "akka" }])
|
||||||
(doseq [doc (take 3 (mgcol/find-maps collection { :language "Clojure" }))]
|
(doseq [doc (take 3 (mc/find-maps db collection { :language "Clojure" }))]
|
||||||
(is (= "Clojure" (:language doc))))))
|
(is (= "Clojure" (:language doc))))))
|
||||||
|
|
||||||
|
|
||||||
(deftest find-multiple-maps
|
(deftest find-multiple-maps
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(mgcol/insert-batch collection [{ :language "Clojure", :name "monger" }
|
(mc/insert-batch db collection [{ :language "Clojure", :name "monger" }
|
||||||
{ :language "Clojure", :name "langohr" }
|
{ :language "Clojure", :name "langohr" }
|
||||||
{ :language "Clojure", :name "incanter" }
|
{ :language "Clojure", :name "incanter" }
|
||||||
{ :language "Scala", :name "akka" }])
|
{ :language "Scala", :name "akka" }])
|
||||||
(is (= 1 (clojure.core/count (mgcol/find-maps collection { :language "Scala" }))))
|
(is (= 1 (clojure.core/count (mc/find-maps db collection { :language "Scala" }))))
|
||||||
(is (= 3 (.count (mgcol/find-maps collection { :language "Clojure" }))))
|
(is (= 3 (.count (mc/find-maps db collection { :language "Clojure" }))))
|
||||||
(is (empty? (mgcol/find-maps collection { :language "Java" })))
|
(is (empty? (mc/find-maps db collection { :language "Java" })))
|
||||||
(is (empty? (mgcol/find-maps monger.core/*mongodb-database* collection { :language "Java" } [:language :name])))))
|
(is (empty? (mc/find-maps db collection { :language "Java" } [:language :name])))))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(deftest find-multiple-partial-documents
|
(deftest find-multiple-partial-documents
|
||||||
(let [collection "libraries"]
|
(let [collection "libraries"]
|
||||||
(mgcol/insert-batch collection [{ :language "Clojure", :name "monger" }
|
(mc/insert-batch db collection [{ :language "Clojure", :name "monger" }
|
||||||
{ :language "Clojure", :name "langohr" }
|
{ :language "Clojure", :name "langohr" }
|
||||||
{ :language "Clojure", :name "incanter" }
|
{ :language "Clojure", :name "incanter" }
|
||||||
{ :language "Scala", :name "akka" }])
|
{ :language "Scala", :name "akka" }])
|
||||||
(let [scala-libs (mgcol/find collection { :language "Scala" } [:name])
|
(let [scala-libs (mc/find db collection { :language "Scala" } [:name])
|
||||||
clojure-libs (mgcol/find collection { :language "Clojure"} [:language])]
|
clojure-libs (mc/find db collection { :language "Clojure"} [:language])]
|
||||||
(is (= 1 (.count scala-libs)))
|
(is (= 1 (.count scala-libs)))
|
||||||
(is (= 3 (.count clojure-libs)))
|
(is (= 3 (.count clojure-libs)))
|
||||||
(doseq [i clojure-libs]
|
(doseq [i clojure-libs]
|
||||||
(let [doc (mgcnv/from-db-object i true)]
|
(let [doc (mgcnv/from-db-object i true)]
|
||||||
(is (= (:language doc) "Clojure"))))
|
(is (= (:language doc) "Clojure"))))
|
||||||
(is (empty? (mgcol/find collection { :language "Erlang" } [:name]))))))
|
(is (empty? (mc/find db collection { :language "Erlang" } [:name]))))))
|
||||||
|
|
||||||
|
(deftest find-maps-with-keywordize-false
|
||||||
|
(let [collection "libraries"]
|
||||||
|
(mc/insert-batch db collection [{ :language "Clojure", :name "monger" }
|
||||||
|
{ :language "Clojure", :name "langohr" }])
|
||||||
|
(let [results (mc/find-maps db collection {:name "langohr"} [] false)]
|
||||||
|
(is (= 1 (.count results)))
|
||||||
|
(is (= (get (first results) "language") "Clojure"))))))
|
||||||
|
|
|
||||||
|
|
@ -1,54 +1,25 @@
|
||||||
(ns monger.test.result-test
|
(ns monger.test.result-test
|
||||||
(:import [com.mongodb BasicDBObject WriteResult WriteConcern] java.util.Date)
|
(:import [com.mongodb BasicDBObject WriteResult WriteConcern] java.util.Date)
|
||||||
(:require [monger core collection conversion]
|
(:require [monger.core :as mg]
|
||||||
[monger.test.helper :as helper])
|
[monger.collection :as mc]
|
||||||
(:use clojure.test))
|
[monger.result :as mgres]
|
||||||
|
monger.util
|
||||||
|
[clojure.test :refer :all]))
|
||||||
|
|
||||||
(helper/connect!)
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")]
|
||||||
;;
|
(deftest test-updated-existing?-with-write-result
|
||||||
;; MongoCommandResult
|
(mc/remove db "libraries")
|
||||||
;;
|
(let [collection "libraries"
|
||||||
|
doc-id (monger.util/random-uuid)
|
||||||
|
date (Date.)
|
||||||
(deftest test-ok?
|
doc { :created-at date :data-store "MongoDB" :language "Clojure" :_id doc-id }
|
||||||
(let [result-that-is-not-ok-1 (doto (BasicDBObject.) (.put "ok" 0))
|
modified-doc { :created-at date :data-store "MongoDB" :language "Erlang" :_id doc-id }]
|
||||||
result-that-is-not-ok-2 (doto (BasicDBObject.) (.put "ok" "false"))
|
(let [result (mc/update db collection { :language "Clojure" } doc {:upsert true})]
|
||||||
result-that-is-ok-1 (doto (BasicDBObject.) (.put "ok" 1))
|
(is (not (mgres/updated-existing? result)))
|
||||||
result-that-is-ok-2 (doto (BasicDBObject.) (.put "ok" "true"))
|
(is (= 1 (mgres/affected-count result))))
|
||||||
result-that-is-ok-3 (doto (BasicDBObject.) (.put "ok" 1.0))]
|
(is (mgres/updated-existing? (mc/update db collection { :language "Clojure" } doc {:upsert true})))
|
||||||
(is (not (monger.result/ok? result-that-is-not-ok-1)))
|
(is (mgres/updated-existing? (mc/update db collection { :language "Clojure" } modified-doc {:multi false :upsert true})))
|
||||||
(is (not (monger.result/ok? result-that-is-not-ok-2)))
|
(is (= 1 (mgres/affected-count (mc/remove db collection { :_id doc-id }))))
|
||||||
(is (monger.result/ok? result-that-is-ok-1))
|
(mc/remove db collection)
|
||||||
(is (monger.result/ok? result-that-is-ok-2))
|
(mg/disconnect conn))))
|
||||||
(is (monger.result/ok? result-that-is-ok-3))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-has-error?
|
|
||||||
(let [result-that-has-no-error1 (doto (BasicDBObject.) (.put "ok" 0))
|
|
||||||
result-that-has-no-error2 (doto (BasicDBObject.) (.put "err" ""))
|
|
||||||
result-that-has-error1 (doto (BasicDBObject.) (.put "err" (BasicDBObject.)))]
|
|
||||||
(is (not (monger.result/has-error? result-that-has-no-error1)))
|
|
||||||
(is (not (monger.result/has-error? result-that-has-no-error2)))
|
|
||||||
(is (monger.result/has-error? result-that-has-error1))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-updated-existing?-with-db-object
|
|
||||||
(let [input1 (doto (BasicDBObject.) (.put "updatedExisting" true))
|
|
||||||
input2 (doto (BasicDBObject.) (.put "updatedExisting" false))
|
|
||||||
input3 (BasicDBObject.)]
|
|
||||||
(is (monger.result/updated-existing? input1))
|
|
||||||
(is (not (monger.result/updated-existing? input2)))
|
|
||||||
(is (not (monger.result/updated-existing? input3)))))
|
|
||||||
|
|
||||||
(deftest test-updated-existing?-with-write-result
|
|
||||||
(monger.collection/remove "libraries")
|
|
||||||
(let [collection "libraries"
|
|
||||||
doc-id (monger.util/random-uuid)
|
|
||||||
date (Date.)
|
|
||||||
doc { :created-at date, :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
|
||||||
modified-doc { :created-at date, :data-store "MongoDB", :language "Erlang", :_id doc-id }]
|
|
||||||
(is (not (monger.result/updated-existing? (monger.collection/update collection { :language "Clojure" } doc :upsert true))))
|
|
||||||
(is (monger.result/updated-existing? (monger.collection/update collection { :language "Clojure" } doc :upsert true)))
|
|
||||||
(monger.result/updated-existing? (monger.collection/update collection { :language "Clojure" } modified-doc :multi false :upsert true))
|
|
||||||
(monger.collection/remove collection)))
|
|
||||||
|
|
|
||||||
50
test/monger/test/ring/clojure_session_store_test.clj
Normal file
50
test/monger/test/ring/clojure_session_store_test.clj
Normal file
|
|
@ -0,0 +1,50 @@
|
||||||
|
(ns monger.test.ring.clojure-session-store-test
|
||||||
|
(:require [monger.core :as mg]
|
||||||
|
[monger.collection :as mc]
|
||||||
|
[clojure.test :refer :all]
|
||||||
|
[ring.middleware.session.store :refer :all]
|
||||||
|
[monger.ring.session-store :refer :all]))
|
||||||
|
|
||||||
|
|
||||||
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")]
|
||||||
|
(defn purge-sessions
|
||||||
|
[f]
|
||||||
|
(mc/remove db "sessions")
|
||||||
|
(f)
|
||||||
|
(mc/remove db "sessions"))
|
||||||
|
|
||||||
|
(use-fixtures :each purge-sessions)
|
||||||
|
|
||||||
|
|
||||||
|
(deftest test-reading-a-session-that-does-not-exist
|
||||||
|
(let [store (session-store db "sessions")]
|
||||||
|
(is (= {} (read-session store "a-missing-key-1228277")))))
|
||||||
|
|
||||||
|
|
||||||
|
(deftest test-reading-a-session-that-does-exist
|
||||||
|
(let [store (session-store db "sessions")
|
||||||
|
sk (write-session store nil {:library "Monger"})
|
||||||
|
m (read-session store sk)]
|
||||||
|
(is sk)
|
||||||
|
(is (and (:_id m)))
|
||||||
|
(is (= (dissoc m :_id)
|
||||||
|
{:library "Monger"}))))
|
||||||
|
|
||||||
|
|
||||||
|
(deftest test-updating-a-session
|
||||||
|
(let [store (session-store db "sessions")
|
||||||
|
sk1 (write-session store nil {:library "Monger"})
|
||||||
|
sk2 (write-session store sk1 {:library "Ring"})
|
||||||
|
m (read-session store sk2)]
|
||||||
|
(is (and sk1 sk2))
|
||||||
|
(is (and (:_id m)))
|
||||||
|
(is (= sk1 sk2))
|
||||||
|
(is (= (dissoc m :_id)
|
||||||
|
{:library "Ring"}))))
|
||||||
|
|
||||||
|
(deftest test-deleting-a-session
|
||||||
|
(let [store (session-store db "sessions")
|
||||||
|
sk (write-session store nil {:library "Monger"})]
|
||||||
|
(is (nil? (delete-session store sk)))
|
||||||
|
(is (= {} (read-session store sk))))))
|
||||||
|
|
@ -1,54 +1,54 @@
|
||||||
(ns monger.test.ring.session-store-test
|
(ns monger.test.ring.session-store-test
|
||||||
(:require [monger core util]
|
(:require [monger.core :as mg]
|
||||||
[monger.collection :as mc]
|
[monger.collection :as mc]
|
||||||
[monger.test.helper :as helper])
|
[clojure.test :refer :all]
|
||||||
(:use clojure.test
|
[ring.middleware.session.store :refer :all]
|
||||||
ring.middleware.session.store
|
[monger.ring.session-store :refer :all]))
|
||||||
monger.ring.session-store))
|
|
||||||
|
|
||||||
|
|
||||||
(helper/connect!)
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")]
|
||||||
|
(defn purge-sessions
|
||||||
|
[f]
|
||||||
|
(mc/remove db "sessions")
|
||||||
|
(f)
|
||||||
|
(mc/remove db "sessions"))
|
||||||
|
|
||||||
(defn purge-sessions
|
(use-fixtures :each purge-sessions)
|
||||||
[f]
|
|
||||||
(mc/remove "web_sessions")
|
|
||||||
(mc/remove "sessions")
|
|
||||||
(f)
|
|
||||||
(mc/remove "web_sessions")
|
|
||||||
(mc/remove "sessions"))
|
|
||||||
|
|
||||||
(use-fixtures :each purge-sessions)
|
(deftest test-reading-a-session-that-does-not-exist
|
||||||
|
(let [store (monger-store db "sessions")]
|
||||||
|
(is (= {} (read-session store "a-missing-key-1228277")))))
|
||||||
|
|
||||||
|
(deftest test-reading-a-session-that-does-exist
|
||||||
|
(let [store (monger-store db "sessions")
|
||||||
|
sk (write-session store nil {:library "Monger"})
|
||||||
|
m (read-session store sk)]
|
||||||
|
(is sk)
|
||||||
|
(is (and (:_id m) (:date m)))
|
||||||
|
(is (= (dissoc m :_id :date)
|
||||||
|
{:library "Monger"}))))
|
||||||
|
|
||||||
(deftest test-reading-a-session-that-does-not-exist
|
(deftest test-updating-a-session
|
||||||
(let [store (monger-store)]
|
(let [store (monger-store db "sessions")
|
||||||
(is (= {} (read-session store "a-missing-key-1228277")))))
|
sk1 (write-session store nil {:library "Monger"})
|
||||||
|
sk2 (write-session store sk1 {:library "Ring"})
|
||||||
|
m (read-session store sk2)]
|
||||||
|
(is (and sk1 sk2))
|
||||||
|
(is (and (:_id m) (:date m)))
|
||||||
|
(is (= sk1 sk2))
|
||||||
|
(is (= (dissoc m :_id :date)
|
||||||
|
{:library "Ring"}))))
|
||||||
|
|
||||||
|
(deftest test-deleting-a-session
|
||||||
|
(let [store (monger-store db "sessions")
|
||||||
|
sk (write-session store nil {:library "Monger"})]
|
||||||
|
(is (nil? (delete-session store sk)))
|
||||||
|
(is (= {} (read-session store sk)))))
|
||||||
|
|
||||||
(deftest test-reading-a-session-that-does-exist
|
(deftest test-reader-extensions
|
||||||
(let [store (monger-store)
|
(let [d (java.util.Date.)
|
||||||
sk (write-session store nil {:library "Monger"})
|
oid (org.bson.types.ObjectId.)]
|
||||||
m (read-session store sk)]
|
(binding [*print-dup* true]
|
||||||
(is sk)
|
(pr-str d)
|
||||||
(is (and (:_id m) (:date m)))
|
(pr-str oid)))))
|
||||||
(is (= (dissoc m :_id :date)
|
|
||||||
{:library "Monger"}))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-updating-a-session
|
|
||||||
(let [store (monger-store "sessions")
|
|
||||||
sk1 (write-session store nil {:library "Monger"})
|
|
||||||
sk2 (write-session store sk1 {:library "Ring"})
|
|
||||||
m (read-session store sk2)]
|
|
||||||
(is (and sk1 sk2))
|
|
||||||
(is (and (:_id m) (:date m)))
|
|
||||||
(is (= sk1 sk2))
|
|
||||||
(is (= (dissoc m :_id :date)
|
|
||||||
{:library "Ring"}))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest test-deleting-a-session
|
|
||||||
(let [store (monger-store "sessions")
|
|
||||||
sk (write-session store nil {:library "Monger"})]
|
|
||||||
(is (nil? (delete-session store sk)))
|
|
||||||
(is (= {} (read-session store sk)))))
|
|
||||||
|
|
|
||||||
|
|
@ -1,42 +1,40 @@
|
||||||
(ns monger.test.stress-test
|
(ns monger.test.stress-test
|
||||||
(:import [com.mongodb Mongo DB DBCollection WriteResult DBObject WriteConcern DBCursor]
|
(:require [monger.core :as mg]
|
||||||
java.util.Date)
|
[monger.collection :as mc]
|
||||||
(:require monger.core
|
[monger.conversion :refer [to-db-object from-db-object]]
|
||||||
[monger.test.helper :as helper])
|
[clojure.test :refer :all])
|
||||||
(:use clojure.test))
|
(:import [com.mongodb WriteConcern]
|
||||||
|
java.util.Date))
|
||||||
|
|
||||||
|
|
||||||
;;
|
(let [conn (mg/connect)
|
||||||
;; Fixture functions
|
db (mg/get-db conn "monger-test")]
|
||||||
;;
|
(defn purge-collection
|
||||||
|
[coll f]
|
||||||
|
(mc/remove db coll)
|
||||||
|
(f)
|
||||||
|
(mc/remove db coll))
|
||||||
|
|
||||||
(defn purge-collection
|
(defn purge-things-collection
|
||||||
[collection-name, f]
|
[f]
|
||||||
(monger.collection/remove collection-name)
|
(purge-collection "things" f))
|
||||||
(f)
|
|
||||||
(monger.collection/remove collection-name))
|
|
||||||
|
|
||||||
(defn purge-things-collection
|
(use-fixtures :each purge-things-collection)
|
||||||
[f]
|
|
||||||
(purge-collection "things" f))
|
|
||||||
|
|
||||||
(use-fixtures :each purge-things-collection)
|
(deftest ^{:performance true} insert-large-batches-of-documents-without-object-ids
|
||||||
|
(doseq [n [10 100 1000 10000 20000]]
|
||||||
|
(let [collection "things"
|
||||||
|
docs (map (fn [i]
|
||||||
|
(to-db-object { :title "Untitled" :created-at (Date.) :number i }))
|
||||||
|
(take n (iterate inc 1)))]
|
||||||
|
(mc/remove db collection)
|
||||||
|
(println "Inserting " n " documents...")
|
||||||
|
(time (mc/insert-batch db collection docs))
|
||||||
|
(is (= n (mc/count db collection))))))
|
||||||
|
|
||||||
|
(deftest ^{:performance true} convert-large-number-of-dbojects-to-maps
|
||||||
|
(doseq [n [10 100 1000 20000 40000]]
|
||||||
;;
|
(let [docs (map (fn [i]
|
||||||
;; Tests
|
(to-db-object {:title "Untitled" :created-at (Date.) :number i}))
|
||||||
;;
|
(take n (iterate inc 1)))]
|
||||||
|
(time (doall (map (fn [x] (from-db-object x true)) docs)))))))
|
||||||
(monger.core/set-default-write-concern! WriteConcern/NORMAL)
|
|
||||||
|
|
||||||
(deftest ^{:performance true} insert-large-batches-of-documents-without-object-ids
|
|
||||||
(doseq [n [1000 10000 100000]]
|
|
||||||
(let [collection "things"
|
|
||||||
docs (map (fn [i]
|
|
||||||
(monger.conversion/to-db-object { :title "Untitled" :created-at (Date.) :number i }))
|
|
||||||
(take n (iterate inc 1)))]
|
|
||||||
(monger.collection/remove collection)
|
|
||||||
(println "Inserting " n " documents...")
|
|
||||||
(time (monger.collection/insert-batch collection docs))
|
|
||||||
(is (= n (monger.collection/count collection))))))
|
|
||||||
|
|
|
||||||
|
|
@ -1,134 +1,169 @@
|
||||||
(set! *warn-on-reflection* true)
|
|
||||||
|
|
||||||
(ns monger.test.updating-test
|
(ns monger.test.updating-test
|
||||||
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject]
|
(:import [com.mongodb WriteResult WriteConcern DBObject]
|
||||||
org.bson.types.ObjectId
|
org.bson.types.ObjectId
|
||||||
java.util.Date)
|
java.util.Date)
|
||||||
(:require [monger core util]
|
(:require [monger.core :as mg]
|
||||||
[monger.collection :as mc]
|
[monger.collection :as mc]
|
||||||
|
[monger.util :as mu]
|
||||||
[monger.result :as mr]
|
[monger.result :as mr]
|
||||||
[monger.test.helper :as helper])
|
[clojure.test :refer :all]
|
||||||
(:use clojure.test
|
[monger.operators :refer :all]
|
||||||
monger.operators
|
[monger.conversion :refer [to-db-object]]))
|
||||||
monger.test.fixtures
|
|
||||||
[monger.conversion :only [to-db-object]]))
|
|
||||||
|
|
||||||
(helper/connect!)
|
(let [conn (mg/connect)
|
||||||
|
db (mg/get-db conn "monger-test")]
|
||||||
|
(defn purge-collections
|
||||||
|
[f]
|
||||||
|
(mc/remove db "people")
|
||||||
|
(mc/remove db "docs")
|
||||||
|
(mc/remove db "things")
|
||||||
|
(mc/remove db "libraries")
|
||||||
|
(f)
|
||||||
|
(mc/remove db "people")
|
||||||
|
(mc/remove db "docs")
|
||||||
|
(mc/remove db "things")
|
||||||
|
(mc/remove db "libraries"))
|
||||||
|
|
||||||
(use-fixtures :each purge-people purge-docs purge-things purge-libraries)
|
(use-fixtures :each purge-collections)
|
||||||
|
|
||||||
|
(deftest ^{:updating true} update-document-by-id-without-upsert
|
||||||
|
(let [collection "libraries"
|
||||||
|
doc-id (mu/random-uuid)
|
||||||
|
date (Date.)
|
||||||
|
doc { :created-at date, :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
||||||
|
modified-doc { :created-at date, :data-store "MongoDB", :language "Erlang", :_id doc-id }]
|
||||||
|
(mc/insert db collection doc)
|
||||||
|
(is (= (to-db-object doc) (mc/find-by-id db collection doc-id)))
|
||||||
|
(mc/update db collection { :_id doc-id } { $set { :language "Erlang" } })
|
||||||
|
(is (= (to-db-object modified-doc) (mc/find-by-id db collection doc-id)))))
|
||||||
|
|
||||||
|
(deftest ^{:updating true} update-document-by-id-without-upsert-using-update-by-id
|
||||||
|
(let [collection "libraries"
|
||||||
|
doc-id (mu/random-uuid)
|
||||||
|
date (Date.)
|
||||||
|
doc { :created-at date, :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
||||||
|
modified-doc { :created-at date, :data-store "MongoDB", :language "Erlang", :_id doc-id }]
|
||||||
|
(mc/insert db collection doc)
|
||||||
|
(is (= (to-db-object doc) (mc/find-by-id db collection doc-id)))
|
||||||
|
(mc/update-by-id db collection doc-id { $set { :language "Erlang" } })
|
||||||
|
(is (= (to-db-object modified-doc) (mc/find-by-id db collection doc-id)))))
|
||||||
|
|
||||||
|
(deftest ^{:updating true} update-nested-document-fields-without-upsert-using-update-by-id
|
||||||
|
(let [collection "libraries"
|
||||||
|
doc-id (ObjectId.)
|
||||||
|
date (Date.)
|
||||||
|
doc { :created-at date :data-store "MongoDB" :language { :primary "Clojure" } :_id doc-id }
|
||||||
|
modified-doc { :created-at date :data-store "MongoDB" :language { :primary "Erlang" } :_id doc-id }]
|
||||||
|
(mc/insert db collection doc)
|
||||||
|
(is (= (to-db-object doc) (mc/find-by-id db collection doc-id)))
|
||||||
|
(mc/update-by-id db collection doc-id { $set { "language.primary" "Erlang" }})
|
||||||
|
(is (= (to-db-object modified-doc) (mc/find-by-id db collection doc-id)))))
|
||||||
|
|
||||||
|
|
||||||
;;
|
(deftest ^{:updating true} update-multiple-documents
|
||||||
;; update, save
|
(let [collection "libraries"]
|
||||||
;;
|
(mc/insert-batch db collection [{ :language "Clojure", :name "monger" }
|
||||||
|
{ :language "Clojure", :name "langohr" }
|
||||||
(deftest update-document-by-id-without-upsert
|
{ :language "Clojure", :name "incanter" }
|
||||||
(let [collection "libraries"
|
{ :language "Scala", :name "akka" }])
|
||||||
doc-id (monger.util/random-uuid)
|
(is (= 3 (mc/count db collection { :language "Clojure" })))
|
||||||
date (Date.)
|
(is (= 1 (mc/count db collection { :language "Scala" })))
|
||||||
doc { :created-at date, :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
(is (= 0 (mc/count db collection { :language "Python" })))
|
||||||
modified-doc { :created-at date, :data-store "MongoDB", :language "Erlang", :_id doc-id }]
|
(mc/update db collection { :language "Clojure" } { $set { :language "Python" } } {:multi true})
|
||||||
(mc/insert collection doc)
|
(is (= 0 (mc/count db collection { :language "Clojure" })))
|
||||||
(is (= (doc (mc/find-by-id collection doc-id))))
|
(is (= 1 (mc/count db collection { :language "Scala" })))
|
||||||
(mc/update collection { :_id doc-id } { :language "Erlang" })
|
(is (= 3 (mc/count db collection { :language "Python" })))))
|
||||||
(is (= (modified-doc (mc/find-by-id collection doc-id))))))
|
|
||||||
|
|
||||||
(deftest update-document-by-id-without-upsert-using-update-by-id
|
|
||||||
(let [collection "libraries"
|
|
||||||
doc-id (monger.util/random-uuid)
|
|
||||||
date (Date.)
|
|
||||||
doc { :created-at date, :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
|
||||||
modified-doc { :created-at date, :data-store "MongoDB", :language "Erlang", :_id doc-id }]
|
|
||||||
(mc/insert collection doc)
|
|
||||||
(is (= (doc (mc/find-by-id collection doc-id))))
|
|
||||||
(mc/update-by-id collection doc-id { :language "Erlang" })
|
|
||||||
(is (= (modified-doc (mc/find-by-id collection doc-id))))))
|
|
||||||
|
|
||||||
(deftest update-nested-document-fields-without-upsert-using-update-by-id
|
|
||||||
(let [collection "libraries"
|
|
||||||
doc-id (ObjectId.)
|
|
||||||
date (Date.)
|
|
||||||
doc { :created-at date :data-store "MongoDB" :language { :primary "Clojure" } :_id doc-id }
|
|
||||||
modified-doc { :created-at date :data-store "MongoDB" :language { :primary "Erlang" } :_id doc-id }]
|
|
||||||
(mc/insert collection doc)
|
|
||||||
(is (= (doc (mc/find-by-id collection doc-id))))
|
|
||||||
(mc/update-by-id collection doc-id { $set { "language.primary" "Erlang" }})
|
|
||||||
(is (= (modified-doc (mc/find-by-id collection doc-id))))))
|
|
||||||
|
|
||||||
|
|
||||||
(deftest update-multiple-documents
|
(deftest ^{:updating true} save-a-new-document
|
||||||
(let [collection "libraries"]
|
(let [collection "people"
|
||||||
(mc/insert collection { :language "Clojure", :name "monger" })
|
document {:name "Joe" :age 30}]
|
||||||
(mc/insert collection { :language "Clojure", :name "langohr" })
|
(is (mc/save db "people" document))
|
||||||
(mc/insert collection { :language "Clojure", :name "incanter" })
|
(is (= 1 (mc/count db collection)))))
|
||||||
(mc/insert collection { :language "Scala", :name "akka" })
|
|
||||||
(is (= 3 (mc/count collection { :language "Clojure" })))
|
(deftest ^{:updating true} save-and-return-a-new-document
|
||||||
(is (= 1 (mc/count collection { :language "Scala" })))
|
(let [collection "people"
|
||||||
(is (= 0 (mc/count collection { :language "Python" })))
|
document {:name "Joe" :age 30}
|
||||||
(mc/update collection { :language "Clojure" } { $set { :language "Python" } } :multi true)
|
returned (mc/save-and-return db "people" document)]
|
||||||
(is (= 0 (mc/count collection { :language "Clojure" })))
|
(is (:_id returned))
|
||||||
(is (= 1 (mc/count collection { :language "Scala" })))
|
(is (= document (dissoc returned :_id)))
|
||||||
(is (= 3 (mc/count collection { :language "Python" })))))
|
(is (= 1 (mc/count db collection)))))
|
||||||
|
|
||||||
|
|
||||||
(deftest save-a-new-document
|
(deftest ^{:updating true} save-a-new-basic-db-object
|
||||||
(let [collection "people"
|
(let [collection "people"
|
||||||
document { :name "Joe", :age 30 }]
|
doc (to-db-object {:name "Joe" :age 30})]
|
||||||
(is (monger.result/ok? (mc/save "people" document)))
|
(is (nil? (mu/get-id doc)))
|
||||||
(is (= 1 (mc/count collection)))))
|
(mc/save db "people" doc WriteConcern/SAFE)
|
||||||
|
(is (not (nil? (mu/get-id doc))))))
|
||||||
|
|
||||||
(deftest save-a-new-basic-db-object
|
|
||||||
(let [collection "people"
|
|
||||||
doc (to-db-object { :name "Joe", :age 30 })]
|
|
||||||
(is (nil? (monger.util/get-id doc)))
|
|
||||||
(mc/save monger.core/*mongodb-database* "people" doc WriteConcern/SAFE)
|
|
||||||
(is (not (nil? (monger.util/get-id doc))))))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(deftest update-an-existing-document-using-save
|
(deftest ^{:updating true} update-an-existing-document-using-save
|
||||||
(let [collection "people"
|
(let [collection "people"
|
||||||
doc-id "people-1"
|
doc-id "people-1"
|
||||||
document { :_id doc-id, :name "Joe", :age 30 }]
|
document { :_id doc-id, :name "Joe", :age 30 }]
|
||||||
(is (monger.result/ok? (mc/insert "people" document)))
|
(is (mc/insert db collection document))
|
||||||
(is (= 1 (mc/count collection)))
|
(is (= 1 (mc/count db collection)))
|
||||||
(mc/save collection { :_id doc-id, :name "Alan", :age 40 })
|
(mc/save db collection { :_id doc-id, :name "Alan", :age 40 })
|
||||||
(is (= 1 (mc/count collection { :name "Alan", :age 40 })))))
|
(is (= 1 (mc/count db collection { :name "Alan", :age 40 })))))
|
||||||
|
|
||||||
|
(deftest ^{:updating true} update-an-existing-document-using-save-and-return
|
||||||
|
(let [collection "people"
|
||||||
|
document (mc/insert-and-return db collection {:name "Joe" :age 30})
|
||||||
|
doc-id (:_id document)
|
||||||
|
updated (mc/save-and-return db collection {:_id doc-id :name "Alan" :age 40})]
|
||||||
|
(is (= {:_id doc-id :name "Alan" :age 40} updated))
|
||||||
|
(is (= 1 (mc/count db collection)))
|
||||||
|
(is (= 1 (mc/count db collection {:name "Alan" :age 40})))))
|
||||||
|
|
||||||
|
|
||||||
(deftest set-an-attribute-on-existing-document-using-update
|
(deftest ^{:updating true} set-an-attribute-on-existing-document-using-update
|
||||||
(let [collection "people"
|
(let [collection "people"
|
||||||
doc-id (monger.util/object-id)
|
doc-id (mu/object-id)
|
||||||
document { :_id doc-id, :name "Joe", :age 30 }]
|
document { :_id doc-id, :name "Joe", :age 30 }]
|
||||||
(is (monger.result/ok? (mc/insert "people" document)))
|
(is (mc/insert db collection document))
|
||||||
(is (= 1 (mc/count collection)))
|
(is (= 1 (mc/count db collection)))
|
||||||
(is (= 0 (mc/count collection { :has_kids true })))
|
(is (= 0 (mc/count db collection { :has_kids true })))
|
||||||
(mc/update collection { :_id doc-id } { $set { :has_kids true } })
|
(mc/update db collection { :_id doc-id } { $set { :has_kids true } })
|
||||||
(is (= 1 (mc/count collection { :has_kids true })))))
|
(is (= 1 (mc/count db collection { :has_kids true })))))
|
||||||
|
|
||||||
|
|
||||||
(deftest increment-multiple-fields-using-exists-operator-and-update
|
(deftest ^{:updating true} increment-multiple-fields-using-exists-operator-and-update
|
||||||
(let [collection "matches"
|
(let [collection "matches"
|
||||||
doc-id (monger.util/object-id)
|
doc-id (mu/object-id)
|
||||||
document { :_id doc-id :abc 0 :def 10 }]
|
document { :_id doc-id :abc 0 :def 10 }]
|
||||||
(mc/remove collection)
|
(mc/remove db collection)
|
||||||
(is (monger.result/ok? (mc/insert collection document)))
|
(is (mc/insert db collection document))
|
||||||
(is (= 1 (mc/count collection {:abc {$exists true} :def {$exists true}})))
|
(is (= 1 (mc/count db collection {:abc {$exists true} :def {$exists true}})))
|
||||||
(mc/update collection {:abc {$exists true} :def {$exists true}} {$inc {:abc 1 :def 0}})
|
(mc/update db collection {:abc {$exists true} :def {$exists true}} {$inc {:abc 1 :def 0}})
|
||||||
(is (= 1 (mc/count collection { :abc 1 })))))
|
(is (= 1 (mc/count db collection { :abc 1 })))))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
(deftest upsert-a-document
|
(deftest ^{:updating true} upsert-a-document-using-update
|
||||||
(let [collection "libraries"
|
(let [collection "libraries"
|
||||||
doc-id (monger.util/random-uuid)
|
doc-id (mu/random-uuid)
|
||||||
date (Date.)
|
date (Date.)
|
||||||
doc { :created-at date, :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
doc { :created-at date, :data-store "MongoDB", :language "Clojure", :_id doc-id }
|
||||||
modified-doc { :created-at date, :data-store "MongoDB", :language "Erlang", :_id doc-id }]
|
modified-doc { :created-at date, :data-store "MongoDB", :language "Erlang", :_id doc-id }]
|
||||||
(is (not (monger.result/updated-existing? (mc/update collection { :language "Clojure" } doc :upsert true))))
|
(is (not (mr/updated-existing? (mc/update db collection { :language "Clojure" } doc {:upsert true}))))
|
||||||
(is (= 1 (mc/count collection)))
|
(is (= 1 (mc/count db collection)))
|
||||||
(is (monger.result/updated-existing? (mc/update collection { :language "Clojure" } modified-doc :multi false :upsert true)))
|
(is (mr/updated-existing? (mc/update db collection { :language "Clojure" } modified-doc {:multi false :upsert true})))
|
||||||
(is (= 1 (mc/count collection)))
|
(is (= 1 (mc/count db collection)))
|
||||||
(is (= (modified-doc (mc/find-by-id collection doc-id))))
|
(is (= (to-db-object modified-doc) (mc/find-by-id db collection doc-id)))
|
||||||
(mc/remove collection)))
|
(mc/remove db collection)))
|
||||||
|
|
||||||
|
(deftest ^{:updating true} upsert-a-document-using-upsert
|
||||||
|
(let [collection "libraries"
|
||||||
|
doc-id (mu/random-uuid)
|
||||||
|
date (Date.)
|
||||||
|
doc {:created-at date :data-store "MongoDB" :language "Clojure" :_id doc-id}
|
||||||
|
modified-doc {:created-at date :data-store "MongoDB" :language "Erlang" :_id doc-id}]
|
||||||
|
(mc/remove db collection)
|
||||||
|
(is (not (mr/updated-existing? (mc/upsert db collection {:language "Clojure"} doc))))
|
||||||
|
(is (= 1 (mc/count db collection)))
|
||||||
|
(is (mr/updated-existing? (mc/upsert db collection {:language "Clojure"} modified-doc {:multi false})))
|
||||||
|
(is (= 1 (mc/count db collection)))
|
||||||
|
(is (= (to-db-object modified-doc) (mc/find-by-id db collection doc-id)))
|
||||||
|
(mc/remove db collection))))
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
(ns monger.test.util-test
|
(ns monger.test.util-test
|
||||||
(:import com.mongodb.DBObject)
|
(:import com.mongodb.DBObject)
|
||||||
(:require [monger util conversion])
|
(:require [monger util conversion]
|
||||||
(:use clojure.test))
|
[clojure.test :refer :all]))
|
||||||
|
|
||||||
|
|
||||||
(deftest get-object-id
|
(deftest get-object-id
|
||||||
|
|
|
||||||
11
test/resources/logback.xml
Normal file
11
test/resources/logback.xml
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
<configuration>
|
||||||
|
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
|
||||||
|
<encoder>
|
||||||
|
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
|
||||||
|
</encoder>
|
||||||
|
</appender>
|
||||||
|
<logger name="org.mongodb" level="WARN"/>
|
||||||
|
<root level="DEBUG">
|
||||||
|
<appender-ref ref="STDOUT"/>
|
||||||
|
</root>
|
||||||
|
</configuration>
|
||||||
Loading…
Reference in a new issue