Compare commits
736 Commits
1  .github/PULL_REQUEST_TEMPLATE.md  (vendored)

@@ -6,7 +6,6 @@ Make sure that:
-->

- [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc).
- [ ] There is a ticket in the bug tracker for the project in our [JIRA](https://jira.spring.io/browse/DATAMONGO).
- [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes.
- [ ] You submit test cases (unit or integration tests) that back your changes.
- [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only).
47  .github/workflows/project.yml  (new file, vendored)

@@ -0,0 +1,47 @@
# GitHub Actions to automate GitHub issues for Spring Data Project Management

name: Spring Data GitHub Issues

on:
  issues:
    types: [opened, edited, reopened]
  issue_comment:
    types: [created]
  pull_request_target:
    types: [opened, edited, reopened]

jobs:
  Inbox:
    runs-on: ubuntu-latest
    if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request == null
    steps:
      - name: Create or Update Issue Card
        uses: peter-evans/create-or-update-project-card@v1.1.2
        with:
          project-name: 'Spring Data'
          column-name: 'Inbox'
          project-location: 'spring-projects'
          token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
  Pull-Request:
    runs-on: ubuntu-latest
    if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request != null
    steps:
      - name: Create or Update Pull Request Card
        uses: peter-evans/create-or-update-project-card@v1.1.2
        with:
          project-name: 'Spring Data'
          column-name: 'Review pending'
          project-location: 'spring-projects'
          issue-number: ${{ github.event.pull_request.number }}
          token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
  Feedback-Provided:
    runs-on: ubuntu-latest
    if: github.repository_owner == 'spring-projects' && github.event_name == 'issue_comment' && github.event.action == 'created' && github.actor != 'spring-projects-issues' && github.event.pull_request == null && github.event.issue.state == 'open' && contains(toJSON(github.event.issue.labels), 'waiting-for-feedback')
    steps:
      - name: Update Project Card
        uses: peter-evans/create-or-update-project-card@v1.1.2
        with:
          project-name: 'Spring Data'
          column-name: 'Feedback provided'
          project-location: 'spring-projects'
          token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
3  .mvn/wrapper/maven-wrapper.properties  (vendored)

@@ -1 +1,2 @@
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
#Mon Aug 14 08:53:22 EDT 2023
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.4/apache-maven-3.9.4-bin.zip
4  CI.adoc

@@ -1,6 +1,6 @@
= Continuous Integration

image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Moore%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Moore%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]

@@ -16,7 +16,7 @@ All of these use cases are great reasons to essentially run what the CI server d

IMPORTANT: To do this you must have Docker installed on your machine.

1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash`
1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3:latest /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
@@ -1,3 +1,3 @@
= Spring Data contribution guidelines

You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here].
237  Jenkinsfile  (vendored)

@@ -1,9 +1,15 @@
def p = [:]
node {
checkout scm
p = readProperties interpolate: true, file: 'ci/pipeline.properties'
}

pipeline {
agent none

triggers {
pollSCM 'H/10 * * * *'
upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS)
}

options {
@@ -14,49 +20,77 @@ pipeline {
stages {
stage("Docker images") {
parallel {
stage('Publish JDK 8 + MongoDB 4.0') {
stage('Publish JDK (Java 17) + MongoDB 4.4') {
when {
changeset "ci/openjdk8-mongodb-4.0/**"
anyOf {
changeset "ci/openjdk17-mongodb-4.4/**"
changeset "ci/pipeline.properties"
}
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

steps {
script {
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0", "ci/openjdk8-mongodb-4.0/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
def image = docker.build("springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.4.4.version']} ci/openjdk17-mongodb-4.4/")
docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
image.push()
}
}
}
}
stage('Publish JDK 8 + MongoDB 4.2') {
stage('Publish JDK (Java 17) + MongoDB 5.0') {
when {
changeset "ci/openjdk8-mongodb-4.2/**"
anyOf {
changeset "ci/openjdk17-mongodb-5.0/**"
changeset "ci/pipeline.properties"
}
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

steps {
script {
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2.0", "ci/openjdk8-mongodb-4.2/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
def image = docker.build("springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.5.0.version']} ci/openjdk17-mongodb-5.0/")
docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
image.push()
}
}
}
}
stage('Publish JDK 15 + MongoDB 4.2') {
stage('Publish JDK (Java 17) + MongoDB 6.0') {
when {
changeset "ci/openjdk15-mongodb-4.2/**"
anyOf {
changeset "ci/openjdk17-mongodb-6.0/**"
changeset "ci/pipeline.properties"
}
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

steps {
script {
def image = docker.build("springci/spring-data-openjdk15-with-mongodb-4.2.0", "ci/openjdk15-mongodb-4.2/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
def image = docker.build("springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.6.0.version']} ci/openjdk17-mongodb-6.0/")
docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
image.push()
}
}
}
}
stage('Publish JDK (Java 20) + MongoDB 6.0') {
when {
anyOf {
changeset "ci/openjdk20-mongodb-6.0/**"
changeset "ci/pipeline.properties"
}
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

steps {
script {
def image = docker.build("springci/spring-data-with-mongodb-6.0:${p['java.next.tag']}", "--build-arg BASE=${p['docker.java.next.image']} --build-arg MONGODB=${p['docker.mongodb.6.0.version']} ci/openjdk20-mongodb-6.0/")
docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
image.push()
}
}
@@ -65,97 +99,108 @@ pipeline {
}
}

stage("test: baseline (jdk8)") {
stage("test: baseline (main)") {
when {
beforeAgent(true)
anyOf {
branch 'master'
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
not { triggeredBy 'UpstreamCause' }
}
}
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
environment {
ARTIFACTORY = credentials("${p['artifactory.credentials']}")
}
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
script {
docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
}
}

stage("Test other configurations") {
when {
beforeAgent(true)
allOf {
branch 'master'
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
not { triggeredBy 'UpstreamCause' }
}
}
parallel {
stage("test: mongodb 4.0 (jdk8)") {

stage("test: MongoDB 5.0 (main)") {
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
environment {
ARTIFACTORY = credentials("${p['artifactory.credentials']}")
}
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
script {
docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
}
}

stage("test: mongodb 4.2 (jdk8)") {
stage("test: MongoDB 6.0 (main)") {
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
environment {
ARTIFACTORY = credentials("${p['artifactory.credentials']}")
}
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
script {
docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongosh --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
}
}

stage("test: baseline (jdk15)") {
stage("test: MongoDB 6.0 (next)") {
agent {
docker {
image 'springci/spring-data-openjdk15-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
environment {
ARTIFACTORY = credentials("${p['artifactory.credentials']}")
}
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
script {
docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-6.0:${p['java.next.tag']}").inside(p['docker.java.inside.basic']) {
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongosh --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
}
}
}
@@ -163,62 +208,36 @@ pipeline {

stage('Release to artifactory') {
when {
beforeAgent(true)
anyOf {
branch 'master'
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
not { triggeredBy 'UpstreamCause' }
}
}
agent {
docker {
image 'adoptopenjdk/openjdk8:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
label 'data'
}
options { timeout(time: 20, unit: 'MINUTES') }

environment {
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
ARTIFACTORY = credentials("${p['artifactory.credentials']}")
}

steps {
sh 'rm -rf ?'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
'-Dartifactory.server=https://repo.spring.io ' +
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
"-Dartifactory.staging-repository=libs-snapshot-local " +
"-Dartifactory.build-name=spring-data-mongodb " +
"-Dartifactory.build-number=${BUILD_NUMBER} " +
'-Dmaven.test.skip=true clean deploy -U -B'
}
}

stage('Publish documentation') {
when {
branch 'master'
}
agent {
docker {
image 'adoptopenjdk/openjdk8:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
script {
docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.basic']) {
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -v'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory ' +
'-Dartifactory.server=https://repo.spring.io ' +
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
"-Dartifactory.staging-repository=libs-snapshot-local " +
"-Dartifactory.build-name=spring-data-mongodb " +
"-Dartifactory.build-number=${BUILD_NUMBER} " +
'-Dmaven.test.skip=true clean deploy -U -B'
}
}
}
options { timeout(time: 20, unit: 'MINUTES') }

environment {
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
}

steps {
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,distribute ' +
'-Dartifactory.server=https://repo.spring.io ' +
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
"-Dartifactory.distribution-repository=temp-private-local " +
'-Dmaven.test.skip=true clean deploy -U -B'
}
}
}
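The pipeline above starts `mongod` with `--replSet rs0` and runs `rs.initiate(...)` before executing the tests because MongoDB multi-document transactions require a replica set (or sharded cluster); the `transactionLifetimeLimitSeconds` parameter likewise only applies there. Application code exercising those transactions through Spring Data MongoDB typically registers a `MongoTransactionManager`; a hedged sketch of such wiring (class and bean names are illustrative, not part of this repository):

[source,java]
----
@Configuration
class TransactionConfig {

    @Bean
    MongoTransactionManager transactionManager(MongoDatabaseFactory factory) {
        // enables @Transactional support on top of the replica-set-backed database
        return new MongoTransactionManager(factory);
    }
}
----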
202  LICENSE.txt  (new file)

@@ -0,0 +1,202 @@

Apache License
Version 2.0, January 2004
https://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright {yyyy} {name of copyright owner}

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

https://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
273  README.adoc

@@ -1,17 +1,19 @@
image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]
image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start]

= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]

The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
The primary goal of the https://spring.io/projects/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities.
The Spring Data MongoDB project provides integration with the MongoDB document database.
Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer.

[[code-of-conduct]]
== Code of Conduct

This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.

[[getting-started]]
== Getting Started

Here is a quick teaser of an application using Spring Data Repositories in Java:
@@ -59,6 +61,7 @@ class ApplicationConfig extends AbstractMongoClientConfiguration {
}
----
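
The hunk above shows only the tail of the Getting Started listing, so the repository and configuration it refers to are not visible here. A minimal illustrative sketch of such an application (the `Person` domain type, repository, and configuration class are assumptions for illustration, not the README's own elided listing):

[source,java]
----
@Document
class Person {

    @Id private String id;
    private String name;

    // getters and setters omitted
}

interface PersonRepository extends CrudRepository<Person, String> {

    List<Person> findByName(String name);
}

@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoClientConfiguration {

    @Override
    protected String getDatabaseName() {
        return "springdata"; // assumed database name
    }
}
----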

[[maven-configuration]]
=== Maven configuration

Add the Maven dependency:

@@ -68,164 +71,35 @@ Add the Maven dependency:
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>${version}.RELEASE</version>
  <version>${version}</version>
</dependency>
----

If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.
If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository
and declare the appropriate dependency version.

[source,xml]
----
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>${version}.BUILD-SNAPSHOT</version>
  <version>${version}-SNAPSHOT</version>
</dependency>

<repository>
  <id>spring-libs-snapshot</id>
  <id>spring-snapshot</id>
  <name>Spring Snapshot Repository</name>
  <url>https://repo.spring.io/libs-snapshot</url>
  <url>https://repo.spring.io/snapshot</url>
</repository>
----

== Upgrading from 2.x
[[upgrading]]
== Upgrading

The 4.0 MongoDB Java Driver does no longer support certain features that have already been deprecated in one of the last minor versions.
Some of the changes affect the initial setup configuration as well as compile/runtime features. We summarized the most typical changes one might encounter.

=== XML Namespace

.Changed XML Namespace Elements and Attributes:
|===
Element / Attribute | 2.x | 3.x

| `<mongo:mongo-client />`
| Used to create a `com.mongodb.MongoClient`
| Now exposes a `com.mongodb.client.MongoClient`

| `<mongo:mongo-client replica-set="..." />`
| Was a comma delimited list of replica set members (host/port)
| Now defines the replica set name. +
Use `<mongo:client-settings cluster-hosts="..." />` instead

| `<mongo:db-factory writeConcern="..." />`
| NONE, NORMAL, SAFE, FSYNC_SAFE, REPLICAS_SAFE, MAJORITY
| W1, W2, W3, UNAKNOWLEDGED, AKNOWLEDGED, JOURNALED, MAJORITY
|===
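
For readers moving off the XML namespace entirely, the same settings can be applied in Java configuration through `MongoClientSettings`; a hedged sketch (host, replica set name, and the chosen `WriteConcern` are assumptions):

[source,java]
----
class Config extends AbstractMongoClientConfiguration {

    @Override
    protected String getDatabaseName() {
        return "database";
    }

    @Override
    public void configureClientSettings(MongoClientSettings.Builder builder) {
        builder.applyToClusterSettings(cluster -> cluster
                .hosts(Arrays.asList(new ServerAddress("127.0.0.1", 27017))) // cluster-hosts
                .requiredReplicaSetName("rs0"))                              // replica-set
            .writeConcern(WriteConcern.MAJORITY);                            // writeConcern
    }
}
----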

.Removed XML Namespace Elements and Attributes:
|===
Element / Attribute | Replacement in 3.x | Comment

| `<mongo:db-factory mongo-ref="..." />`
| `<mongo:db-factory mongo-client-ref="..." />`
| Referencing a `com.mongodb.client.MongoClient`.

| `<mongo:mongo-client credentials="..." />`
| `<mongo:mongo-client credential="..." />`
| Single authentication data instead of list.

| `<mongo:client-options />`
| `<mongo:client-settings />`
| See `com.mongodb.MongoClientSettings` for details.
|===

.New XML Namespace Elements and Attributes:
|===
Element | Comment

| `<mongo:db-factory mongo-client-ref="..." />`
| Replacement for `<mongo:db-factory mongo-ref="..." />`

| `<mongo:db-factory connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:mongo-client connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:client-settings />`
| Namespace element for `com.mongodb.MongoClientSettings`.

|===

=== Java Configuration

.Java API changes
|===
Type | Comment

| `MongoClientFactoryBean`
| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
Uses `MongoClientSettings` instead of `MongoClientOptions`.

| `MongoDataIntegrityViolationException`
| Uses `WriteConcernResult` instead of `WriteResult`.

| `BulkOperationException`
| Uses `MongoBulkWriteException` and `com.mongodb.bulk.BulkWriteError` instead of `BulkWriteException` and `com.mongodb.BulkWriteError`

| `ReactiveMongoClientFactoryBean`
| Uses `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`

| `ReactiveMongoClientSettingsFactoryBean`
| Now produces `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`
|===
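
Because the factory beans and templates now expose `com.mongodb.client.MongoClient`, programmatic bootstrapping changes accordingly; a minimal sketch (connection string and database name are placeholders):

[source,java]
----
MongoClient client = MongoClients.create("mongodb://localhost:27017");
MongoTemplate template = new MongoTemplate(client, "test");
----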

.Removed Java API:
|===
2.x | Replacement in 3.x | Comment

| `MongoClientOptionsFactoryBean`
| `MongoClientSettingsFactoryBean`
| Creating a `com.mongodb.MongoClientSettings`.

| `AbstractMongoConfiguration`
| `AbstractMongoClientConfiguration` +
(Available since 2.1)
| Using `com.mongodb.client.MongoClient`.

| `MongoDbFactory#getLegacyDb()`
| -
| -

| `SimpleMongoDbFactory`
| `SimpleMongoClientDbFactory` +
(Available since 2.1)
|

| `MapReduceOptions#getOutputType()`
| `MapReduceOptions#getMapReduceAction()`
| Returns `MapReduceAction` instead of `MapReduceCommand.OutputType`.

| `Meta\|Query` maxScan & snapshot
|
|
|===
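
As an example of the factory replacement listed above, code that previously instantiated `SimpleMongoDbFactory` would now go through the client-based variant; a hedged sketch with placeholder connection details:

[source,java]
----
MongoTemplate template = new MongoTemplate(
        new SimpleMongoClientDbFactory(MongoClients.create("mongodb://localhost:27017"), "test"));
----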

=== Other Changes

==== UUID Types

The MongoDB UUID representation can now be configured with different formats.
This has to be done via `MongoClientSettings` as shown in the snippet below.

.UUID Codec Configuration
====
[source,java]
----
static class Config extends AbstractMongoClientConfiguration {

    @Override
    public void configureClientSettings(MongoClientSettings.Builder builder) {
        builder.uuidRepresentation(UuidRepresentation.STANDARD);
    }

    // ...
}
----
====
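
The same representation can also be set when a driver client is built by hand rather than through `AbstractMongoClientConfiguration`; a brief sketch with a placeholder connection string:

[source,java]
----
MongoClient client = MongoClients.create(MongoClientSettings.builder()
        .applyConnectionString(new ConnectionString("mongodb://localhost:27017"))
        .uuidRepresentation(UuidRepresentation.STANDARD)
        .build());
----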
|
||||
Instructions for how to upgrade from earlier versions of Spring Data are provided on the project https://github.com/spring-projects/spring-data-commons/wiki[wiki].
|
||||
Follow the links in the https://github.com/spring-projects/spring-data-commons/wiki#release-notes[release notes section] to find the version that you want to upgrade to.
|
||||
|
||||
[[getting-help]]
== Getting Help

Having trouble with Spring Data? We’d love to help!

* If you are just starting out with Spring, try one of the https://spring.io/guides[guides].
* If you are upgrading, check out the https://docs.spring.io/spring-data/mongodb/docs/current/changelog.txt[changelog] for "`new and noteworthy`" features.
* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data[`spring-data-mongodb`].
You can also chat with the community on https://gitter.im/spring-projects/spring-data[Gitter].
* Report bugs with Spring Data MongoDB at https://github.com/spring-projects/spring-data-mongodb/issues[github.com/spring-projects/spring-data-mongodb/issues].

[[reporting-issues]]
== Reporting Issues

Spring Data uses GitHub as its issue tracking system to record bugs and feature requests.
If you want to raise an issue, please follow the recommendations below:

* Before you log a bug, please search the https://github.com/spring-projects/spring-data-mongodb/issues[issue tracker] to see if someone has already reported the problem.
* If the issue does not already exist, https://github.com/spring-projects/spring-data-mongodb/issues/new[create a new issue].
* Please provide as much information as possible with the issue report; we like to know the version of Spring Data that you are using, the JVM version, the stack trace, etc.
* If you need to paste code or include a stack trace, use https://guides.github.com/features/mastering-markdown/[Markdown] code fences +++```+++.
* If possible, try to create a test case or project that replicates the issue. Attach a link to your code or a compressed file containing your code.

[[guides]]
== Guides

The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:

* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.

[[examples]]
== Examples

* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.

[[building-from-source]]
== Building from Source

You do not need to build from source to use Spring Data. Binaries are available in https://repo.spring.io[repo.spring.io]
and accessible from Maven using the Maven configuration noted <<maven-configuration,above>>.

NOTE: Configuration for Gradle is similar to Maven.

The best way to get started is by creating a Spring Boot project using MongoDB on https://start.spring.io[start.spring.io].
Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link]
to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link]
to build a reactive one.

However, if you want to try out the latest and greatest, Spring Data MongoDB can be easily built with the https://github.com/takari/maven-wrapper[Maven wrapper]
and minimally, JDK 17 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]).

In order to build Spring Data MongoDB, you will need to https://www.mongodb.com/try/download/community[download]
and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution].

Once you have installed MongoDB, you need to start a MongoDB server. It is convenient to set an environment variable to
your MongoDB installation directory (e.g. `MONGODB_HOME`).

To run the full test suite, a https://docs.mongodb.com/manual/tutorial/deploy-replica-set/[MongoDB Replica Set]
is required.

To run the MongoDB server, enter the following command from the command line:

[source,bash]
----
$ $MONGODB_HOME/bin/mongod --dbpath $MONGODB_HOME/runtime/data --ipv6 --port 27017 --replSet rs0
...
"msg":"Successfully connected to host"
----

Once the MongoDB server starts up, you should see the message (`msg`), "_Successfully connected to host_".

Notice the `--dbpath` option to the `mongod` command. You can set this to anything you like, but in this case, we set
the absolute path to a sub-directory (`runtime/data/`) under the MongoDB installation directory (in `$MONGODB_HOME`).

You need to initialize the MongoDB replica set only once, the first time the MongoDB server is started.
To initialize the replica set, start a mongo client:

[source,bash]
----
$ $MONGODB_HOME/bin/mongo
MongoDB server version: 6.0.0
...
----

Then enter the following command:

[source,bash]
----
mongo> rs.initiate({ _id: 'rs0', members: [ { _id: 0, host: '127.0.0.1:27017' } ] })
----
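
Optionally, you can confirm that the replica set is healthy before running the build; `rs.status()` is a standard MongoDB shell command (not specific to this project) and should report `"ok" : 1` for the member once initialization has completed:

[source,bash]
----
mongo> rs.status()
----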

Finally, on a UNIX-based system (for example, Linux or Mac OS X) you may need to adjust the `ulimit`.
In case you need to, you can adjust the `ulimit` with the following command (32768 is just a recommendation):

[source,bash]
----
$ ulimit -n 32768
----

You can use `ulimit -a` again to verify the `ulimit` for "_open files_" was set appropriately.

Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw` (Maven Wrapper) command:

[source,bash]
----
$ ./mvnw clean install
----

If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.8.0 or above].

_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular, please sign
the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._

=== Building reference documentation

Building the documentation also builds the project without running tests.
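
A typical invocation uses the Maven wrapper with a documentation profile.
The `distribute` profile name below is an assumption (it is not stated in this document) based on how Spring Data modules of this generation were commonly built, so check the project's `pom.xml` for the actual profile name:

[source,bash]
----
$ ./mvnw clean install -Pdistribute
----
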
The generated documentation is available from `target/site/reference/html/index.html`.
[[license]]
== License

Spring Data MongoDB is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license].

@@ -10,7 +10,7 @@ All of these use cases are great reasons to essentially run what Concourse does
|
||||
|
||||
IMPORTANT: To do this you must have Docker installed on your machine.
|
||||
|
||||
1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash`
|
||||
+
|
||||
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
|
||||
+
|
||||
@@ -23,7 +23,7 @@ Since the container is binding to your source, you can make edits from your IDE
|
||||
If you need to test the `build.sh` script, do this:
|
||||
|
||||
1. `mkdir /tmp/spring-data-mongodb-artifactory`
|
||||
2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash`
|
||||
+
|
||||
This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary
|
||||
artifactory output directory at `spring-data-mongodb-artifactory`.
|
||||
@@ -36,4 +36,4 @@ IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about
|
||||
It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts
|
||||
and deliver them to artifactory.
|
||||
|
||||
NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
FROM adoptopenjdk/openjdk11:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update ; \
|
||||
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
@@ -1,15 +0,0 @@
|
||||
FROM adoptopenjdk/openjdk15:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update ; \
|
||||
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
ci/openjdk17-mongodb-4.4/Dockerfile (new file)
@@ -0,0 +1,22 @@
|
||||
ARG BASE
|
||||
FROM ${BASE}
|
||||
# Any ARG statements before FROM are cleared.
|
||||
ARG MONGODB
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list && \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 && \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 && \
|
||||
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list && \
|
||||
echo ${TZ} > /etc/timezone
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
ci/openjdk17-mongodb-5.0/Dockerfile (new file)
@@ -0,0 +1,24 @@
|
||||
ARG BASE
|
||||
FROM ${BASE}
|
||||
# Any ARG statements before FROM are cleared.
|
||||
ARG MONGODB
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list && \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
|
||||
# MongoDB 5.0 release signing key
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 && \
|
||||
# Needed when MongoDB creates a 5.0 folder.
|
||||
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/5.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-5.0.list && \
|
||||
echo ${TZ} > /etc/timezone
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
ci/openjdk17-mongodb-6.0/Dockerfile (new file)
@@ -0,0 +1,24 @@
|
||||
ARG BASE
|
||||
FROM ${BASE}
|
||||
# Any ARG statements before FROM are cleared.
|
||||
ARG MONGODB
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list && \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
|
||||
# MongoDB 6.0 release signing key
|
||||
wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | apt-key add - && \
|
||||
# Needed when MongoDB creates a 6.0 folder.
|
||||
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/6.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \
|
||||
echo ${TZ} > /etc/timezone
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
ci/openjdk20-mongodb-6.0/Dockerfile (new file)
@@ -0,0 +1,24 @@
|
||||
ARG BASE
|
||||
FROM ${BASE}
|
||||
# Any ARG statements before FROM are cleared.
|
||||
ARG MONGODB
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list && \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
|
||||
# MongoDB 6.0 release signing key
|
||||
wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | apt-key add - && \
|
||||
# Needed when MongoDB creates a 6.0 folder.
|
||||
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu jammy/mongodb-org/6.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \
|
||||
echo ${TZ} > /etc/timezone
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
@@ -1,15 +0,0 @@
|
||||
FROM adoptopenjdk/openjdk8:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN RUN set -eux; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update ; \
|
||||
apt-get install -y mongodb-org=4.0.14 mongodb-org-server=4.0.14 mongodb-org-shell=4.0.14 mongodb-org-mongos=4.0.14 mongodb-org-tools=4.0.14 ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
@@ -1,15 +0,0 @@
|
||||
FROM adoptopenjdk/openjdk8:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update ; \
|
||||
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
ci/pipeline.properties (new file)
@@ -0,0 +1,27 @@
|
||||
# Java versions
|
||||
java.main.tag=17.0.8_7-jdk-focal
|
||||
java.next.tag=20-jdk-jammy
|
||||
|
||||
# Docker container images - standard
|
||||
docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.main.tag}
|
||||
docker.java.next.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.next.tag}
|
||||
|
||||
# Supported versions of MongoDB
|
||||
docker.mongodb.4.4.version=4.4.23
|
||||
docker.mongodb.5.0.version=5.0.19
|
||||
docker.mongodb.6.0.version=6.0.8
|
||||
|
||||
# Supported versions of Redis
|
||||
docker.redis.6.version=6.2.13
|
||||
|
||||
# Supported versions of Cassandra
|
||||
docker.cassandra.3.version=3.11.15
|
||||
|
||||
# Docker environment settings
|
||||
docker.java.inside.basic=-v $HOME:/tmp/jenkins-home
|
||||
docker.java.inside.docker=-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home
|
||||
|
||||
# Credentials
|
||||
docker.registry=
|
||||
docker.credentials=hub.docker.com-springbuildmaster
|
||||
artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c
|
||||
pom.xml
@@ -5,17 +5,17 @@
|
||||
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.1.0</version>
|
||||
<version>4.2.0-SNAPSHOT</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>Spring Data MongoDB</name>
|
||||
<description>MongoDB support for Spring Data</description>
|
||||
<url>https://projects.spring.io/spring-data-mongodb</url>
|
||||
<url>https://spring.io/projects/spring-data-mongodb</url>
|
||||
|
||||
<parent>
|
||||
<groupId>org.springframework.data.build</groupId>
|
||||
<artifactId>spring-data-parent</artifactId>
|
||||
<version>2.4.0</version>
|
||||
<version>3.2.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
@@ -26,8 +26,8 @@
|
||||
<properties>
|
||||
<project.type>multi</project.type>
|
||||
<dist.id>spring-data-mongodb</dist.id>
|
||||
<springdata.commons>2.4.0</springdata.commons>
|
||||
<mongo>4.1.1</mongo>
|
||||
<springdata.commons>3.2.0-SNAPSHOT</springdata.commons>
|
||||
<mongo>4.10.2</mongo>
|
||||
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
|
||||
<jmh.version>1.19</jmh.version>
|
||||
</properties>
|
||||
@@ -112,6 +112,17 @@
|
||||
</developer>
|
||||
</developers>
|
||||
|
||||
<scm>
|
||||
<connection>scm:git:https://github.com/spring-projects/spring-data-mongodb.git</connection>
|
||||
<developerConnection>scm:git:git@github.com:spring-projects/spring-data-mongodb.git</developerConnection>
|
||||
<url>https://github.com/spring-projects/spring-data-mongodb</url>
|
||||
</scm>
|
||||
|
||||
<issueManagement>
|
||||
<system>GitHub</system>
|
||||
<url>https://github.com/spring-projects/spring-data-mongodb/issues</url>
|
||||
</issueManagement>
|
||||
|
||||
<profiles>
|
||||
<profile>
|
||||
<id>benchmarks</id>
|
||||
@@ -134,31 +145,19 @@
|
||||
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>spring-libs-release</id>
|
||||
<url>https://repo.spring.io/libs-release</url>
|
||||
<id>spring-snapshot</id>
|
||||
<url>https://repo.spring.io/snapshot</url>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
<releases>
|
||||
<enabled>false</enabled>
|
||||
</releases>
|
||||
</repository>
|
||||
<repository>
|
||||
<id>sonatype-libs-snapshot</id>
|
||||
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
|
||||
<releases>
|
||||
<enabled>false</enabled>
|
||||
</releases>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
<id>spring-milestone</id>
|
||||
<url>https://repo.spring.io/milestone</url>
|
||||
</repository>
|
||||
</repositories>
|
||||
|
||||
<pluginRepositories>
|
||||
<pluginRepository>
|
||||
<id>spring-plugins-release</id>
|
||||
<url>https://repo.spring.io/plugins-release</url>
|
||||
</pluginRepository>
|
||||
<pluginRepository>
|
||||
<id>spring-libs-milestone</id>
|
||||
<url>https://repo.spring.io/libs-milestone</url>
|
||||
</pluginRepository>
|
||||
|
||||
</pluginRepositories>
|
||||
|
||||
</project>
|
||||
|
||||
settings.xml (new file)
@@ -0,0 +1,29 @@
|
||||
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
|
||||
https://maven.apache.org/xsd/settings-1.0.0.xsd">
|
||||
|
||||
<servers>
|
||||
<server>
|
||||
<id>spring-plugins-release</id>
|
||||
<username>${env.ARTIFACTORY_USR}</username>
|
||||
<password>${env.ARTIFACTORY_PSW}</password>
|
||||
</server>
|
||||
<server>
|
||||
<id>spring-libs-snapshot</id>
|
||||
<username>${env.ARTIFACTORY_USR}</username>
|
||||
<password>${env.ARTIFACTORY_PSW}</password>
|
||||
</server>
|
||||
<server>
|
||||
<id>spring-libs-milestone</id>
|
||||
<username>${env.ARTIFACTORY_USR}</username>
|
||||
<password>${env.ARTIFACTORY_PSW}</password>
|
||||
</server>
|
||||
<server>
|
||||
<id>spring-libs-release</id>
|
||||
<username>${env.ARTIFACTORY_USR}</username>
|
||||
<password>${env.ARTIFACTORY_PSW}</password>
|
||||
</server>
|
||||
</servers>
|
||||
|
||||
</settings>
|
||||
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.1.0</version>
|
||||
<version>4.2.0-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2017-2020 the original author or authors.
|
||||
* Copyright 2017-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2017-2020 the original author or authors.
|
||||
* Copyright 2017-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2017-2020 the original author or authors.
|
||||
* Copyright 2017-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2017-2020 the original author or authors.
|
||||
* Copyright 2017-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -322,7 +322,7 @@ public class AbstractMicrobenchmark {
|
||||
try {
|
||||
ResultsWriter.forUri(uri).write(results);
|
||||
} catch (Exception e) {
|
||||
System.err.println(String.format("Cannot save benchmark results to '%s'. Error was %s.", uri, e));
|
||||
System.err.println(String.format("Cannot save benchmark results to '%s'; Error was %s", uri, e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2017-2020 the original author or authors.
|
||||
* Copyright 2017-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2017-2020 the original author or authors.
|
||||
* Copyright 2017-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -96,15 +96,14 @@ class MongoResultsWriter implements ResultsWriter {
|
||||
for (Object key : doc.keySet()) {
|
||||
|
||||
Object value = doc.get(key);
|
||||
if (value instanceof Document) {
|
||||
value = fixDocumentKeys((Document) value);
|
||||
} else if (value instanceof BasicDBObject) {
|
||||
value = fixDocumentKeys(new Document((BasicDBObject) value));
|
||||
if (value instanceof Document document) {
|
||||
value = fixDocumentKeys(document);
|
||||
} else if (value instanceof BasicDBObject basicDBObject) {
|
||||
value = fixDocumentKeys(new Document(basicDBObject));
|
||||
}
|
||||
|
||||
if (key instanceof String) {
|
||||
if (key instanceof String newKey) {
|
||||
|
||||
String newKey = (String) key;
|
||||
if (newKey.contains(".")) {
|
||||
newKey = newKey.replace('.', ',');
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2017-2020 the original author or authors.
|
||||
* Copyright 2017-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
@@ -14,13 +15,18 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.1.0</version>
|
||||
<version>4.2.0-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
<project.root>${basedir}/..</project.root>
|
||||
<dist.key>SDMONGO</dist.key>
|
||||
|
||||
<!-- Observability -->
|
||||
<micrometer-docs-generator.inputPath>${maven.multiModuleProjectDirectory}/spring-data-mongodb/</micrometer-docs-generator.inputPath>
|
||||
<micrometer-docs-generator.inclusionPattern>.*</micrometer-docs-generator.inclusionPattern>
|
||||
<micrometer-docs-generator.outputPath>${maven.multiModuleProjectDirectory}/target/</micrometer-docs-generator.outputPath>
|
||||
</properties>
|
||||
|
||||
<build>
|
||||
@@ -29,12 +35,43 @@
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>exec-maven-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>generate-docs</id>
|
||||
<phase>generate-resources</phase>
|
||||
<goals>
|
||||
<goal>java</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<mainClass>io.micrometer.docs.DocsGeneratorCommand</mainClass>
|
||||
<includePluginDependencies>true</includePluginDependencies>
|
||||
<arguments>
|
||||
<argument>${micrometer-docs-generator.inputPath}</argument>
|
||||
<argument>${micrometer-docs-generator.inclusionPattern}</argument>
|
||||
<argument>${micrometer-docs-generator.outputPath}</argument>
|
||||
</arguments>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-docs-generator</artifactId>
|
||||
<version>1.0.1</version>
|
||||
<type>jar</type>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.asciidoctor</groupId>
|
||||
<artifactId>asciidoctor-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<attributes>
|
||||
<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
|
||||
<mongo-reactivestreams>${mongo.reactivestreams}
|
||||
</mongo-reactivestreams>
|
||||
<reactor>${reactor}</reactor>
|
||||
</attributes>
|
||||
</configuration>
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
@@ -11,7 +13,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.1.0</version>
|
||||
<version>4.2.0-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -87,6 +89,13 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>jsr305</artifactId>
|
||||
<version>3.0.2</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<!-- reactive -->
|
||||
|
||||
<dependency>
|
||||
@@ -103,6 +112,13 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-crypt</artifactId>
|
||||
<version>1.8.0</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<artifactId>reactor-core</artifactId>
|
||||
@@ -115,27 +131,6 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<artifactId>rxjava</artifactId>
|
||||
<version>${rxjava}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<artifactId>rxjava-reactive-streams</artifactId>
|
||||
<version>${rxjava-reactive-streams}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex.rxjava2</groupId>
|
||||
<artifactId>rxjava</artifactId>
|
||||
<version>${rxjava2}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex.rxjava3</groupId>
|
||||
<artifactId>rxjava</artifactId>
|
||||
@@ -145,12 +140,6 @@
|
||||
|
||||
<!-- CDI -->
|
||||
<!-- Dependency order required to build against CDI 1.0 and test with CDI 2.0 -->
|
||||
<dependency>
|
||||
<groupId>org.apache.geronimo.specs</groupId>
|
||||
<artifactId>geronimo-jcdi_2.0_spec</artifactId>
|
||||
<version>1.0.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.interceptor</groupId>
|
||||
@@ -160,31 +149,48 @@
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.enterprise</groupId>
|
||||
<artifactId>cdi-api</artifactId>
|
||||
<groupId>jakarta.enterprise</groupId>
|
||||
<artifactId>jakarta.enterprise.cdi-api</artifactId>
|
||||
<version>${cdi}</version>
|
||||
<scope>provided</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.annotation</groupId>
|
||||
<artifactId>javax.annotation-api</artifactId>
|
||||
<version>${javax-annotation-api}</version>
|
||||
<groupId>jakarta.annotation</groupId>
|
||||
<artifactId>jakarta.annotation-api</artifactId>
|
||||
<version>${jakarta-annotation-api}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.openwebbeans</groupId>
|
||||
<artifactId>openwebbeans-se</artifactId>
|
||||
<classifier>jakarta</classifier>
|
||||
<version>${webbeans}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.openwebbeans</groupId>
|
||||
<artifactId>openwebbeans-spi</artifactId>
|
||||
<classifier>jakarta</classifier>
|
||||
<version>${webbeans}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.openwebbeans</groupId>
|
||||
<artifactId>openwebbeans-impl</artifactId>
|
||||
<classifier>jakarta</classifier>
|
||||
<version>${webbeans}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- JSR 303 Validation -->
|
||||
<dependency>
|
||||
<groupId>javax.validation</groupId>
|
||||
<artifactId>validation-api</artifactId>
|
||||
<groupId>jakarta.validation</groupId>
|
||||
<artifactId>jakarta.validation-api</artifactId>
|
||||
<version>${validation}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
@@ -197,30 +203,37 @@
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-observation</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-tracing</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hibernate.validator</groupId>
|
||||
<artifactId>hibernate-validator</artifactId>
|
||||
<version>5.4.3.Final</version>
|
||||
<version>7.0.1.Final</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>jakarta.el</groupId>
|
||||
<artifactId>jakarta.el-api</artifactId>
|
||||
<version>4.0.0</version>
|
||||
<scope>provided</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.glassfish</groupId>
|
||||
<artifactId>javax.el</artifactId>
|
||||
<version>3.0.1-b11</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<version>${jodatime}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.threeten</groupId>
|
||||
<artifactId>threetenbp</artifactId>
|
||||
<version>${threetenbp}</version>
|
||||
<artifactId>jakarta.el</artifactId>
|
||||
<version>4.0.2</version>
|
||||
<scope>provided</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
@@ -230,13 +243,6 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jul-to-slf4j</artifactId>
|
||||
<version>${slf4j}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>nl.jqno.equalsverifier</groupId>
|
||||
<artifactId>equalsverifier</artifactId>
|
||||
@@ -272,9 +278,9 @@
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.transaction</groupId>
|
||||
<artifactId>jta</artifactId>
|
||||
<version>1.1</version>
|
||||
<groupId>jakarta.transaction</groupId>
|
||||
<artifactId>jakarta.transaction-api</artifactId>
|
||||
<version>2.0.0</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
@@ -305,11 +311,43 @@
|
||||
|
||||
<dependency>
|
||||
<groupId>io.mockk</groupId>
|
||||
<artifactId>mockk</artifactId>
|
||||
<artifactId>mockk-jvm</artifactId>
|
||||
<version>${mockk}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-test</artifactId>
|
||||
<scope>test</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.github.tomakehurst</groupId>
|
||||
<artifactId>wiremock-jre8-standalone</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-tracing-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-tracing-integration-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- jMolecules -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jmolecules</groupId>
|
||||
<artifactId>jmolecules-ddd</artifactId>
|
||||
<version>${jmolecules}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
@@ -334,8 +372,11 @@
|
||||
<goal>test-process</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<outputDirectory>target/generated-test-sources</outputDirectory>
|
||||
<processor>org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor</processor>
|
||||
<outputDirectory>target/generated-test-sources
|
||||
</outputDirectory>
|
||||
<processor>
|
||||
org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor
|
||||
</processor>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
@@ -355,7 +396,9 @@
|
||||
<exclude>**/ReactivePerformanceTests.java</exclude>
|
||||
</excludes>
|
||||
<systemPropertyVariables>
|
||||
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
|
||||
<java.util.logging.config.file>
|
||||
src/test/resources/logging.properties
|
||||
</java.util.logging.config.file>
|
||||
<reactor.trace.cancel>true</reactor.trace.cancel>
|
||||
</systemPropertyVariables>
|
||||
</configuration>
|
||||
|
||||
@@ -0,0 +1,144 @@
|
||||
/*
|
||||
* Copyright 2021-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.DocumentCodec;
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* A {@link MongoExpression} using the {@link ParameterBindingDocumentCodec} for parsing a raw ({@literal json})
|
||||
* expression. The expression will be wrapped within <code>{ ... }</code> if necessary. The actual parsing and parameter
|
||||
* binding of placeholders like {@code ?0} is delayed upon first call on the the target {@link Document} via
|
||||
* {@link #toDocument()}.
|
||||
* <br />
|
||||
*
|
||||
* <pre class="code">
|
||||
* $toUpper : $name -> { '$toUpper' : '$name' }
|
||||
*
|
||||
* { '$toUpper' : '$name' } -> { '$toUpper' : '$name' }
|
||||
*
|
||||
* { '$toUpper' : '?0' }, "$name" -> { '$toUpper' : '$name' }
|
||||
* </pre>
|
||||
*
|
||||
* Some types might require a special {@link org.bson.codecs.Codec}. If so, make sure to provide a {@link CodecRegistry}
|
||||
* containing the required {@link org.bson.codecs.Codec codec} via {@link #withCodecRegistry(CodecRegistry)}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.2
|
||||
*/
|
||||
public class BindableMongoExpression implements MongoExpression {
|
||||
|
||||
private final String expressionString;
|
||||
|
||||
private final @Nullable CodecRegistryProvider codecRegistryProvider;
|
||||
|
||||
private final @Nullable Object[] args;
|
||||
|
||||
private final Lazy<Document> target;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link BindableMongoExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param args can be {@literal null}.
|
||||
*/
|
||||
public BindableMongoExpression(String expression, @Nullable Object[] args) {
|
||||
this(expression, null, args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link BindableMongoExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param codecRegistryProvider can be {@literal null}.
|
||||
* @param args can be {@literal null}.
|
||||
*/
|
||||
public BindableMongoExpression(String expression, @Nullable CodecRegistryProvider codecRegistryProvider,
|
||||
@Nullable Object[] args) {
|
||||
|
||||
this.expressionString = expression;
|
||||
this.codecRegistryProvider = codecRegistryProvider;
|
||||
this.args = args;
|
||||
this.target = Lazy.of(this::parse);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the {@link CodecRegistry} used to convert expressions.
|
||||
*
|
||||
* @param codecRegistry must not be {@literal null}.
|
||||
* @return new instance of {@link BindableMongoExpression}.
|
||||
*/
|
||||
public BindableMongoExpression withCodecRegistry(CodecRegistry codecRegistry) {
|
||||
return new BindableMongoExpression(expressionString, () -> codecRegistry, args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the arguments to bind to the placeholders via their index.
|
||||
*
|
||||
* @param args must not be {@literal null}.
|
||||
* @return new instance of {@link BindableMongoExpression}.
|
||||
*/
|
||||
public BindableMongoExpression bind(Object... args) {
|
||||
return new BindableMongoExpression(expressionString, codecRegistryProvider, args);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document toDocument() {
|
||||
return target.get();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "BindableMongoExpression{" + "expressionString='" + expressionString + '\'' + ", args="
|
||||
+ Arrays.toString(args) + '}';
|
||||
}
|
||||
|
||||
private Document parse() {
|
||||
|
||||
String expression = wrapJsonIfNecessary(expressionString);
|
||||
|
||||
if (ObjectUtils.isEmpty(args)) {
|
||||
|
||||
if (codecRegistryProvider == null) {
|
||||
return Document.parse(expression);
|
||||
}
|
||||
|
||||
return Document.parse(expression, codecRegistryProvider.getCodecFor(Document.class)
|
||||
.orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry())));
|
||||
}
|
||||
|
||||
ParameterBindingDocumentCodec codec = codecRegistryProvider == null ? new ParameterBindingDocumentCodec()
|
||||
: new ParameterBindingDocumentCodec(codecRegistryProvider.getCodecRegistry());
|
||||
return codec.decode(expression, args);
|
||||
}
|
||||
|
||||
private static String wrapJsonIfNecessary(String json) {
|
||||
|
||||
if (StringUtils.hasText(json) && (json.startsWith("{") && json.endsWith("}"))) {
|
||||
return json;
|
||||
}
|
||||
|
||||
return "{" + json + "}";
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2015-2020 the original author or authors.
|
||||
* Copyright 2015-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2020 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2017-2020 the original author or authors.
|
||||
* Copyright 2017-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -62,7 +62,7 @@ public interface CodecRegistryProvider {
|
||||
*/
|
||||
default <T> Optional<Codec<T>> getCodecFor(Class<T> type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
Assert.notNull(type, "Type must not be null");
|
||||
|
||||
try {
|
||||
return Optional.of(getCodecRegistry().get(type));
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2020 the original author or authors.
|
||||
* Copyright 2010-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2020 the original author or authors.
|
||||
* Copyright 2013-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
* Copyright 2011-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,8 +20,8 @@ import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Helper class featuring helper methods for working with MongoDb collections.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* <br />
|
||||
* <br />
|
||||
* Mainly intended for internal use within the framework.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
* Copyright 2011-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2020 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -30,7 +30,7 @@ import com.mongodb.client.MongoDatabase;
|
||||
* Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining
|
||||
* {@link ClientSession session bound} resources, such as {@link MongoDatabase} and
|
||||
* {@link com.mongodb.client.MongoCollection} suitable for transactional usage.
|
||||
* <p />
|
||||
* <br />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
@@ -43,7 +43,7 @@ public class MongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -56,7 +56,7 @@ public class MongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -71,7 +71,7 @@ public class MongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -85,7 +85,7 @@ public class MongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -102,9 +102,10 @@ public class MongoDatabaseUtils {
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "Factory must not be null!");
|
||||
Assert.notNull(factory, "Factory must not be null");
|
||||
|
||||
if (!TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
if (sessionSynchronization == SessionSynchronization.NEVER
|
||||
|| !TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
}
|
||||
|
||||
@@ -192,19 +193,11 @@ public class MongoDatabaseUtils {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#shouldReleaseBeforeCompletion()
|
||||
*/
|
||||
@Override
|
||||
protected boolean shouldReleaseBeforeCompletion() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#processResourceAfterCommit(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void processResourceAfterCommit(MongoResourceHolder resourceHolder) {
|
||||
|
||||
@@ -213,10 +206,6 @@ public class MongoDatabaseUtils {
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#afterCompletion(int)
|
||||
*/
|
||||
@Override
|
||||
public void afterCompletion(int status) {
|
||||
|
||||
@@ -227,10 +216,6 @@ public class MongoDatabaseUtils {
|
||||
super.afterCompletion(status);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#releaseResource(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void releaseResource(MongoResourceHolder resourceHolder, Object resourceKey) {
|
||||
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Interface for factories creating {@link MongoDatabase} instances.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @deprecated since 3.0, use {@link MongoDatabaseFactory} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public interface MongoDbFactory extends MongoDatabaseFactory {
|
||||
|
||||
/**
|
||||
* Creates a default {@link MongoDatabase} instance.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
default MongoDatabase getDb() throws DataAccessException {
|
||||
return getMongoDatabase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase(String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
default MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
return getMongoDatabase(dbName);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,73 @@
/*
* Copyright 2021-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

/**
* Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when
* passed on to the driver.
* <br />
* A set of predefined {@link MongoExpression expressions}, including a
* {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for method
* like expressions (eg. {@code toUpper(name)}) are available via the
* {@link org.springframework.data.mongodb.core.aggregation Aggregation API}.
*
* @author Christoph Strobl
* @since 3.2
* @see org.springframework.data.mongodb.core.aggregation.ArithmeticOperators
* @see org.springframework.data.mongodb.core.aggregation.ArrayOperators
* @see org.springframework.data.mongodb.core.aggregation.ComparisonOperators
* @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators
* @see org.springframework.data.mongodb.core.aggregation.ConvertOperators
* @see org.springframework.data.mongodb.core.aggregation.DateOperators
* @see org.springframework.data.mongodb.core.aggregation.ObjectOperators
* @see org.springframework.data.mongodb.core.aggregation.SetOperators
* @see org.springframework.data.mongodb.core.aggregation.StringOperators
*/
@FunctionalInterface
public interface MongoExpression {

/**
* Create a new {@link MongoExpression} from plain {@link String} (eg. {@code $toUpper : $name}). <br />
* The given expression will be wrapped with <code>{ ... }</code> to match an actual MongoDB {@link org.bson.Document}
* if necessary.
*
* @param expression must not be {@literal null}.
* @return new instance of {@link MongoExpression}.
*/
static MongoExpression create(String expression) {
return new BindableMongoExpression(expression, null);
}

/**
* Create a new {@link MongoExpression} from plain {@link String} containing placeholders (eg. {@code $toUpper : ?0})
* that will be resolved on first call of {@link #toDocument()}. <br />
* The given expression will be wrapped with <code>{ ... }</code> to match an actual MongoDB {@link org.bson.Document}
* if necessary.
*
* @param expression must not be {@literal null}.
* @return new instance of {@link MongoExpression}.
*/
static MongoExpression create(String expression, Object... args) {
return new BindableMongoExpression(expression, args);
}

/**
* Obtain the native {@link org.bson.Document} representation.
*
* @return never {@literal null}.
*/
org.bson.Document toDocument();
}
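The new MongoExpression contract above can be exercised directly. A minimal usage sketch based on its Javadoc (the expression strings are illustrative, not taken from this change set):

import org.bson.Document;

import org.springframework.data.mongodb.MongoExpression;

class MongoExpressionUsageSketch {

	void expressions() {
		// Plain expression; the wrapping { ... } is added if missing.
		MongoExpression toUpper = MongoExpression.create("$toUpper : '$name'");

		// Parameterized variant; ?0 is resolved on the first toDocument() call.
		MongoExpression bound = MongoExpression.create("$toUpper : ?0", "$name");

		Document document = bound.toDocument();
		System.out.println(document.toJson());
		System.out.println(toUpper.toDocument().toJson());
	}
}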
@@ -0,0 +1,81 @@
/*
* Copyright 2022-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

import java.util.Arrays;
import java.util.function.Consumer;

import org.springframework.data.domain.ManagedTypes;

/**
* @author Christoph Strobl
* @since 4.0
*/
public final class MongoManagedTypes implements ManagedTypes {

private final ManagedTypes delegate;

private MongoManagedTypes(ManagedTypes types) {
this.delegate = types;
}

/**
* Wraps an existing {@link ManagedTypes} object with {@link MongoManagedTypes}.
*
* @param managedTypes must not be {@literal null}.
* @return new instance of {@link MongoManagedTypes}.
*/
public static MongoManagedTypes from(ManagedTypes managedTypes) {
return new MongoManagedTypes(managedTypes);
}

/**
* Factory method used to construct {@link MongoManagedTypes} from the given array of {@link Class types}.
*
* @param types array of {@link Class types} used to initialize the {@link ManagedTypes}; must not be {@literal null}.
* @return new instance of {@link MongoManagedTypes} initialized from {@link Class types}.
*/
public static MongoManagedTypes from(Class<?>... types) {
return fromIterable(Arrays.asList(types));
}

/**
* Factory method used to construct {@link MongoManagedTypes} from the given, required {@link Iterable} of
* {@link Class types}.
*
* @param types {@link Iterable} of {@link Class types} used to initialize the {@link ManagedTypes}; must not be
* {@literal null}.
* @return new instance of {@link MongoManagedTypes} initialized from the given, required {@link Iterable} of {@link Class
* types}.
*/
public static MongoManagedTypes fromIterable(Iterable<? extends Class<?>> types) {
return from(ManagedTypes.fromIterable(types));
}

/**
* Factory method to return an empty {@link MongoManagedTypes} object.
*
* @return an empty {@link MongoManagedTypes} object.
*/
public static MongoManagedTypes empty() {
return from(ManagedTypes.empty());
}

@Override
public void forEach(Consumer<Class<?>> action) {
delegate.forEach(action);
}
}
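A short usage sketch of the MongoManagedTypes holder introduced above (Person and Order are placeholder entity classes, not part of this change set):

import org.springframework.data.mongodb.MongoManagedTypes;

class MongoManagedTypesUsageSketch {

	void managedTypes() {
		MongoManagedTypes types = MongoManagedTypes.from(Person.class, Order.class);

		// Iteration is delegated to the wrapped ManagedTypes instance.
		types.forEach(type -> System.out.println("managed type: " + type.getName()));
	}

	static class Person {}
	static class Order {}
}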
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2020 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -24,7 +24,7 @@ import com.mongodb.client.ClientSession;
|
||||
/**
|
||||
* MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}.
|
||||
* {@link MongoTransactionManager} binds instances of this class to the thread.
|
||||
* <p />
|
||||
* <br />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
@@ -68,7 +68,7 @@ class MongoResourceHolder extends ResourceHolderSupport {
|
||||
ClientSession session = getSession();
|
||||
|
||||
if (session == null) {
|
||||
throw new IllegalStateException("No session available!");
|
||||
throw new IllegalStateException("No session available");
|
||||
}
|
||||
|
||||
return session;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2020 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2020 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2020 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -37,18 +37,18 @@ import com.mongodb.client.ClientSession;
|
||||
/**
|
||||
* A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread.
|
||||
* <p />
|
||||
* <br />
|
||||
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal
|
||||
* consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction()
|
||||
* commit} or {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <p />
|
||||
* <br />
|
||||
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard
|
||||
* {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as
|
||||
* {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* <p />
|
||||
* <br />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override
|
||||
* {@link #doCommit(MongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
@@ -69,11 +69,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} for bean-style usage.
|
||||
* <p />
|
||||
* <br />
|
||||
* <strong>Note:</strong>The {@link MongoDatabaseFactory db factory} has to be
|
||||
* {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a
|
||||
* {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
@@ -100,16 +100,12 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
*/
|
||||
public MongoTransactionManager(MongoDatabaseFactory dbFactory, @Nullable TransactionOptions options) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null");
|
||||
|
||||
this.dbFactory = dbFactory;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doGetTransaction()
|
||||
*/
|
||||
@Override
|
||||
protected Object doGetTransaction() throws TransactionException {
|
||||
|
||||
@@ -118,19 +114,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
return new MongoTransactionObject(resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#isExistingTransaction(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
|
||||
return extractMongoTransaction(transaction).hasResourceHolder();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doBegin(java.lang.Object, org.springframework.transaction.TransactionDefinition)
|
||||
*/
|
||||
@Override
|
||||
protected void doBegin(Object transaction, TransactionDefinition definition) throws TransactionException {
|
||||
|
||||
@@ -160,10 +148,6 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doSuspend(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Object doSuspend(Object transaction) throws TransactionException {
|
||||
|
||||
@@ -173,19 +157,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
return TransactionSynchronizationManager.unbindResource(getRequiredDbFactory());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doResume(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void doResume(@Nullable Object transaction, Object suspendedResources) {
|
||||
TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), suspendedResources);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doCommit(org.springframework.transaction.support.DefaultTransactionStatus)
|
||||
*/
|
||||
@Override
|
||||
protected final void doCommit(DefaultTransactionStatus status) throws TransactionException {
|
||||
|
||||
@@ -212,8 +188,8 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
* By default those labels are ignored, nevertheless one might check for
|
||||
* {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the
|
||||
* commit. <br />
|
||||
* <pre>
|
||||
* <code>
|
||||
* <pre>
|
||||
* int retries = 3;
|
||||
* do {
|
||||
* try {
|
||||
@@ -226,8 +202,8 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
* }
|
||||
* Thread.sleep(500);
|
||||
* } while (--retries > 0);
|
||||
* </pre>
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @param transactionObject never {@literal null}.
|
||||
* @throws Exception in case of transaction errors.
|
||||
@@ -236,10 +212,6 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
transactionObject.commitTransaction();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doRollback(org.springframework.transaction.support.DefaultTransactionStatus)
|
||||
*/
|
||||
@Override
|
||||
protected void doRollback(DefaultTransactionStatus status) throws TransactionException {
|
||||
|
||||
@@ -259,10 +231,6 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doSetRollbackOnly(org.springframework.transaction.support.DefaultTransactionStatus)
|
||||
*/
|
||||
@Override
|
||||
protected void doSetRollbackOnly(DefaultTransactionStatus status) throws TransactionException {
|
||||
|
||||
@@ -270,10 +238,6 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
transactionObject.getRequiredResourceHolder().setRollbackOnly();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doCleanupAfterCompletion(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void doCleanupAfterCompletion(Object transaction) {
|
||||
|
||||
@@ -302,7 +266,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
*/
|
||||
public void setDbFactory(MongoDatabaseFactory dbFactory) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null");
|
||||
this.dbFactory = dbFactory;
|
||||
}
|
||||
|
||||
@@ -325,19 +289,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceTransactionManager#getResourceFactory()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabaseFactory getResourceFactory() {
|
||||
return getRequiredDbFactory();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
|
||||
*/
|
||||
@Override
|
||||
public void afterPropertiesSet() {
|
||||
getRequiredDbFactory();
|
||||
@@ -359,7 +315,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
private MongoDatabaseFactory getRequiredDbFactory() {
|
||||
|
||||
Assert.state(dbFactory != null,
|
||||
"MongoTransactionManager operates upon a MongoDbFactory. Did you forget to provide one? It's required.");
|
||||
"MongoTransactionManager operates upon a MongoDbFactory; Did you forget to provide one; It's required");
|
||||
|
||||
return dbFactory;
|
||||
}
|
||||
@@ -397,7 +353,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber());
|
||||
debugString += String.format("closed = %d, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("closed = %b, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
} else {
|
||||
debugString += "id = n/a";
|
||||
@@ -494,30 +450,22 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
|
||||
private MongoResourceHolder getRequiredResourceHolder() {
|
||||
|
||||
Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present. o_O");
|
||||
Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present; o_O");
|
||||
return resourceHolder;
|
||||
}
|
||||
|
||||
private ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null.");
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null");
|
||||
return session;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
|
||||
*/
|
||||
@Override
|
||||
public boolean isRollbackOnly() {
|
||||
return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#flush()
|
||||
*/
|
||||
@Override
|
||||
public void flush() {
|
||||
TransactionSynchronizationUtils.triggerFlush();
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2016-2020 the original author or authors.
|
||||
* Copyright 2016-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
* Copyright 2019-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -36,7 +36,7 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
* Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for
|
||||
* obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection}
|
||||
* suitable for transactional usage.
|
||||
* <p />
|
||||
* <br />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
@@ -75,7 +75,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -88,7 +88,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -104,7 +104,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -119,7 +119,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -136,7 +136,11 @@ public class ReactiveMongoDatabaseUtils {
|
||||
private static Mono<MongoDatabase> doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "DatabaseFactory must not be null!");
|
||||
Assert.notNull(factory, "DatabaseFactory must not be null");
|
||||
|
||||
if (sessionSynchronization == SessionSynchronization.NEVER) {
|
||||
return getMongoDatabaseOrDefault(dbName, factory);
|
||||
}
|
||||
|
||||
return TransactionSynchronizationManager.forCurrentTransaction()
|
||||
.filter(TransactionSynchronizationManager::isSynchronizationActive) //
|
||||
@@ -210,19 +214,11 @@ public class ReactiveMongoDatabaseUtils {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#shouldReleaseBeforeCompletion()
|
||||
*/
|
||||
@Override
|
||||
protected boolean shouldReleaseBeforeCompletion() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#processResourceAfterCommit(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) {
|
||||
|
||||
@@ -233,10 +229,6 @@ public class ReactiveMongoDatabaseUtils {
|
||||
return Mono.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#afterCompletion(int)
|
||||
*/
|
||||
@Override
|
||||
public Mono<Void> afterCompletion(int status) {
|
||||
|
||||
@@ -252,10 +244,6 @@ public class ReactiveMongoDatabaseUtils {
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#releaseResource(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
* Copyright 2019-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -24,7 +24,7 @@ import com.mongodb.reactivestreams.client.ClientSession;
|
||||
/**
|
||||
* MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds
|
||||
* instances of this class to the subscriber context.
|
||||
* <p />
|
||||
* <br />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
* Copyright 2019-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -38,21 +38,21 @@ import com.mongodb.reactivestreams.client.ClientSession;
|
||||
* A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Binds a {@link ClientSession} from the specified
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber
|
||||
* {@link reactor.util.context.Context}.
|
||||
* <p />
|
||||
* <br />
|
||||
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a
|
||||
* {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start},
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or
|
||||
* {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <p />
|
||||
* <br />
|
||||
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead
|
||||
* of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring
|
||||
* classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly.
|
||||
* <p />
|
||||
* <br />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override
|
||||
* {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
@@ -71,11 +71,11 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
|
||||
* <p />
|
||||
* <br />
|
||||
* <strong>Note:</strong>The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to
|
||||
* be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory)} set} before using the instance. Use this constructor
|
||||
* to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <p />
|
||||
* <br />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
@@ -104,16 +104,12 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
@Nullable TransactionOptions options) {
|
||||
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null");
|
||||
|
||||
this.databaseFactory = databaseFactory;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doGetTransaction(org.springframework.transaction.reactive.TransactionSynchronizationManager)
|
||||
*/
|
||||
@Override
|
||||
protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager)
|
||||
throws TransactionException {
|
||||
@@ -123,19 +119,11 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
return new ReactiveMongoTransactionObject(resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#isExistingTransaction(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
|
||||
return extractMongoTransaction(transaction).hasResourceHolder();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doBegin(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, org.springframework.transaction.TransactionDefinition)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction,
|
||||
TransactionDefinition definition) throws TransactionException {
|
||||
@@ -175,10 +163,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSuspend(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Object> doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction)
|
||||
throws TransactionException {
|
||||
@@ -192,10 +176,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doResume(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction,
|
||||
Object suspendedResources) {
|
||||
@@ -203,10 +183,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
.fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCommit(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected final Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
@@ -243,10 +219,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
return transactionObject.commitTransaction();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doRollback(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doRollback(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) {
|
||||
@@ -268,10 +240,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSetRollbackOnly(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
@@ -282,10 +250,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCleanupAfterCompletion(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager,
|
||||
Object transaction) {
|
||||
@@ -317,7 +281,7 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
*/
|
||||
public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null");
|
||||
this.databaseFactory = databaseFactory;
|
||||
}
|
||||
|
||||
@@ -340,10 +304,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
|
||||
*/
|
||||
@Override
|
||||
public void afterPropertiesSet() {
|
||||
getRequiredDatabaseFactory();
|
||||
@@ -363,7 +323,7 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() {
|
||||
|
||||
Assert.state(databaseFactory != null,
|
||||
"ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory. Did you forget to provide one? It's required.");
|
||||
"ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory; Did you forget to provide one; It's required");
|
||||
|
||||
return databaseFactory;
|
||||
}
|
||||
@@ -401,7 +361,7 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber());
|
||||
debugString += String.format("closed = %d, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("closed = %b, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
} else {
|
||||
debugString += "id = n/a";
|
||||
@@ -498,30 +458,22 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
|
||||
private ReactiveMongoResourceHolder getRequiredResourceHolder() {
|
||||
|
||||
Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present. o_O");
|
||||
Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present; o_O");
|
||||
return resourceHolder;
|
||||
}
|
||||
|
||||
private ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null.");
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null");
|
||||
return session;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
|
||||
*/
|
||||
@Override
|
||||
public boolean isRollbackOnly() {
|
||||
return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#flush()
|
||||
*/
|
||||
@Override
|
||||
public void flush() {
|
||||
throw new UnsupportedOperationException("flush() not supported");
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2020 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -35,7 +35,7 @@ import com.mongodb.session.ClientSession;
|
||||
/**
|
||||
* {@link MethodInterceptor} implementation looking up and invoking an alternative target method having
|
||||
* {@link ClientSession} as its first argument. This allows seamless integration with the existing code base.
|
||||
* <p />
|
||||
* <br />
|
||||
* The {@link MethodInterceptor} is aware of methods on {@code MongoCollection} that my return new instances of itself
|
||||
* like (eg. {@link com.mongodb.reactivestreams.client.MongoCollection#withWriteConcern(WriteConcern)} and decorate them
|
||||
* if not already proxied.
|
||||
@@ -76,13 +76,13 @@ public class SessionAwareMethodInterceptor<D, C> implements MethodInterceptor {
|
||||
Class<D> databaseType, ClientSessionOperator<D> databaseDecorator, Class<C> collectionType,
|
||||
ClientSessionOperator<C> collectionDecorator) {
|
||||
|
||||
Assert.notNull(session, "ClientSession must not be null!");
|
||||
Assert.notNull(target, "Target must not be null!");
|
||||
Assert.notNull(sessionType, "SessionType must not be null!");
|
||||
Assert.notNull(databaseType, "Database type must not be null!");
|
||||
Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null!");
|
||||
Assert.notNull(collectionType, "Collection type must not be null!");
|
||||
Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null!");
|
||||
Assert.notNull(session, "ClientSession must not be null");
|
||||
Assert.notNull(target, "Target must not be null");
|
||||
Assert.notNull(sessionType, "SessionType must not be null");
|
||||
Assert.notNull(databaseType, "Database type must not be null");
|
||||
Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null");
|
||||
Assert.notNull(collectionType, "Collection type must not be null");
|
||||
Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null");
|
||||
|
||||
this.session = session;
|
||||
this.target = target;
|
||||
@@ -95,10 +95,6 @@ public class SessionAwareMethodInterceptor<D, C> implements MethodInterceptor {
|
||||
this.sessionType = sessionType;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.aopalliance.intercept.MethodInterceptor(org.aopalliance.intercept.MethodInvocation)
|
||||
*/
|
||||
@Nullable
|
||||
@Override
|
||||
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2018-2020 the original author or authors.
* Copyright 2018-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,13 +15,20 @@
*/
package org.springframework.data.mongodb;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

/**
* {@link SessionSynchronization} is used along with {@link org.springframework.data.mongodb.core.MongoTemplate} to
* define in which type of transactions to participate if any.
* {@link SessionSynchronization} is used along with {@code MongoTemplate} to define in which type of transactions to
* participate if any.
*
* @author Christoph Strobl
* @author Mark Paluch
* @since 2.1
* @see MongoTemplate#setSessionSynchronization(SessionSynchronization)
* @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization)
* @see ReactiveMongoTemplate#setSessionSynchronization(SessionSynchronization)
* @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization)
*/
public enum SessionSynchronization {

@@ -34,5 +41,12 @@ public enum SessionSynchronization {
/**
* Synchronize with native MongoDB transactions initiated via {@link MongoTransactionManager}.
*/
ON_ACTUAL_TRANSACTION;
ON_ACTUAL_TRANSACTION,

/**
* Do not participate in ongoing transactions.
*
* @since 3.2.5
*/
NEVER;
}

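The new NEVER constant gives callers an explicit opt-out from transaction participation. A minimal sketch of switching a template to it via the setter referenced in the Javadoc above:

import org.springframework.data.mongodb.SessionSynchronization;
import org.springframework.data.mongodb.core.MongoTemplate;

class SessionSynchronizationConfigSketch {

	void disableTransactionParticipation(MongoTemplate template) {
		// Do not participate in ongoing transactions (available since 3.2.5).
		template.setSessionSynchronization(SessionSynchronization.NEVER);
	}
}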
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
* Copyright 2020-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,8 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.data.util.Version;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -31,7 +31,7 @@ import com.mongodb.MongoDriverInformation;
|
||||
*/
|
||||
public class SpringDataMongoDB {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(SpringDataMongoDB.class);
|
||||
private static final Log LOGGER = LogFactory.getLog(SpringDataMongoDB.class);
|
||||
|
||||
private static final Version FALLBACK_VERSION = new Version(3);
|
||||
private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation
|
||||
@@ -48,7 +48,7 @@ public class SpringDataMongoDB {
|
||||
|
||||
/**
|
||||
* Fetches the "Implementation-Version" manifest attribute from the jar file.
|
||||
* <p />
|
||||
* <br />
|
||||
* Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the
|
||||
* version in all environments. In this case the current Major version is returned as a fallback.
|
||||
*
|
||||
@@ -68,7 +68,7 @@ public class SpringDataMongoDB {
|
||||
try {
|
||||
return Version.parse(versionString);
|
||||
} catch (Exception e) {
|
||||
LOGGER.debug("Cannot read Spring Data MongoDB version '{}'.", versionString);
|
||||
LOGGER.debug(String.format("Cannot read Spring Data MongoDB version '%s'.", versionString));
|
||||
}
|
||||
|
||||
return FALLBACK_VERSION;
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2010-2020 the original author or authors.
* Copyright 2010-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,12 +16,13 @@
package org.springframework.data.mongodb;

import org.springframework.dao.UncategorizedDataAccessException;
import org.springframework.lang.Nullable;

public class UncategorizedMongoDbException extends UncategorizedDataAccessException {

private static final long serialVersionUID = -2336595514062364929L;

public UncategorizedMongoDbException(String msg, Throwable cause) {
public UncategorizedMongoDbException(String msg, @Nullable Throwable cause) {
super(msg, cause);
}
}

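The only behavioral change here is the relaxed constructor signature. A minimal sketch of what a null cause now permits (the message text is illustrative):

import org.springframework.data.mongodb.UncategorizedMongoDbException;

class ExceptionTranslationSketch {

	void raise() {
		// The cause may legitimately be absent after this change.
		throw new UncategorizedMongoDbException("Unexpected MongoDB error", null);
	}
}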
@@ -0,0 +1,107 @@
|
||||
/*
|
||||
* Copyright 2022-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.aot;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.aot.generate.GenerationContext;
|
||||
import org.springframework.aot.hint.MemberCategory;
|
||||
import org.springframework.aot.hint.TypeReference;
|
||||
import org.springframework.core.annotation.AnnotatedElementUtils;
|
||||
import org.springframework.core.annotation.MergedAnnotations;
|
||||
import org.springframework.data.annotation.Reference;
|
||||
import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory;
|
||||
import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor;
|
||||
import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentReference;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 4.0
|
||||
*/
|
||||
public class LazyLoadingProxyAotProcessor {
|
||||
|
||||
private boolean generalLazyLoadingProxyContributed = false;
|
||||
|
||||
public void registerLazyLoadingProxyIfNeeded(Class<?> type, GenerationContext generationContext) {
|
||||
|
||||
Set<Field> refFields = getFieldsWithAnnotationPresent(type, Reference.class);
|
||||
if (refFields.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
refFields.stream() //
|
||||
.filter(LazyLoadingProxyAotProcessor::isLazyLoading) //
|
||||
.forEach(field -> {
|
||||
|
||||
if (!generalLazyLoadingProxyContributed) {
|
||||
generationContext.getRuntimeHints().proxies().registerJdkProxy(
|
||||
TypeReference.of(org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class),
|
||||
TypeReference.of(org.springframework.aop.SpringProxy.class),
|
||||
TypeReference.of(org.springframework.aop.framework.Advised.class),
|
||||
TypeReference.of(org.springframework.core.DecoratingProxy.class));
|
||||
generalLazyLoadingProxyContributed = true;
|
||||
}
|
||||
|
||||
if (field.getType().isInterface()) {
|
||||
|
||||
List<Class<?>> interfaces = new ArrayList<>(
|
||||
Arrays.asList(LazyLoadingProxyFactory.prepareFactory(field.getType()).getProxiedInterfaces()));
|
||||
interfaces.add(org.springframework.aop.SpringProxy.class);
|
||||
interfaces.add(org.springframework.aop.framework.Advised.class);
|
||||
interfaces.add(org.springframework.core.DecoratingProxy.class);
|
||||
|
||||
generationContext.getRuntimeHints().proxies().registerJdkProxy(interfaces.toArray(Class[]::new));
|
||||
} else {
|
||||
|
||||
Class<?> proxyClass = LazyLoadingProxyFactory.resolveProxyType(field.getType(),
|
||||
LazyLoadingInterceptor::none);
|
||||
|
||||
// see: spring-projects/spring-framework/issues/29309
|
||||
generationContext.getRuntimeHints().reflection().registerType(proxyClass,
|
||||
MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, MemberCategory.INVOKE_DECLARED_METHODS, MemberCategory.DECLARED_FIELDS);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private static boolean isLazyLoading(Field field) {
|
||||
if (AnnotatedElementUtils.isAnnotated(field, DBRef.class)) {
|
||||
return AnnotatedElementUtils.findMergedAnnotation(field, DBRef.class).lazy();
|
||||
}
|
||||
if (AnnotatedElementUtils.isAnnotated(field, DocumentReference.class)) {
|
||||
return AnnotatedElementUtils.findMergedAnnotation(field, DocumentReference.class).lazy();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private static Set<Field> getFieldsWithAnnotationPresent(Class<?> type, Class<? extends Annotation> annotation) {
|
||||
|
||||
Set<Field> fields = new LinkedHashSet<>();
|
||||
for (Field field : type.getDeclaredFields()) {
|
||||
if (MergedAnnotations.from(field).get(annotation).isPresent()) {
|
||||
fields.add(field);
|
||||
}
|
||||
}
|
||||
return fields;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,45 @@
/*
* Copyright 2022-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.aot;

import java.util.function.Predicate;

import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
import org.springframework.data.util.ReactiveWrappers;
import org.springframework.data.util.ReactiveWrappers.ReactiveLibrary;
import org.springframework.data.util.TypeUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;

/**
* @author Christoph Strobl
* @since 4.0
*/
public class MongoAotPredicates {

public static final Predicate<Class<?>> IS_SIMPLE_TYPE = (type) -> MongoSimpleTypes.HOLDER.isSimpleType(type) || TypeUtils.type(type).isPartOf("org.bson");
public static final Predicate<ReactiveLibrary> IS_REACTIVE_LIBARARY_AVAILABLE = (lib) -> ReactiveWrappers.isAvailable(lib);
public static final Predicate<ClassLoader> IS_SYNC_CLIENT_PRESENT = (classLoader) -> ClassUtils.isPresent("com.mongodb.client.MongoClient", classLoader);

public static boolean isReactorPresent() {
return IS_REACTIVE_LIBARARY_AVAILABLE.test(ReactiveWrappers.ReactiveLibrary.PROJECT_REACTOR);
}

public static boolean isSyncClientPresent(@Nullable ClassLoader classLoader) {
return IS_SYNC_CLIENT_PRESENT.test(classLoader);
}

}
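A short sketch exercising the predicates above (ObjectId is used only as an example of a type living in the org.bson namespace; the variable names are illustrative):

import org.bson.types.ObjectId;

import org.springframework.data.mongodb.aot.MongoAotPredicates;

class MongoAotPredicatesUsageSketch {

	void checks(ClassLoader classLoader) {
		boolean simpleType = MongoAotPredicates.IS_SIMPLE_TYPE.test(ObjectId.class); // org.bson type, treated as simple
		boolean reactor = MongoAotPredicates.isReactorPresent();                     // true if Project Reactor is on the classpath
		boolean syncClient = MongoAotPredicates.isSyncClientPresent(classLoader);    // true if com.mongodb.client.MongoClient is present
	}
}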
@@ -0,0 +1,56 @@
/*
* Copyright 2022-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.aot;

import org.springframework.aot.generate.GenerationContext;
import org.springframework.core.ResolvableType;
import org.springframework.data.aot.ManagedTypesBeanRegistrationAotProcessor;
import org.springframework.data.mongodb.MongoManagedTypes;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;

/**
* @author Christoph Strobl
* @since 2022/06
*/
class MongoManagedTypesBeanRegistrationAotProcessor extends ManagedTypesBeanRegistrationAotProcessor {

private final LazyLoadingProxyAotProcessor lazyLoadingProxyAotProcessor = new LazyLoadingProxyAotProcessor();

public MongoManagedTypesBeanRegistrationAotProcessor() {
setModuleIdentifier("mongo");
}

@Override
protected boolean isMatch(@Nullable Class<?> beanType, @Nullable String beanName) {
return isMongoManagedTypes(beanType) || super.isMatch(beanType, beanName);
}

protected boolean isMongoManagedTypes(@Nullable Class<?> beanType) {
return beanType != null && ClassUtils.isAssignable(MongoManagedTypes.class, beanType);
}

@Override
protected void contributeType(ResolvableType type, GenerationContext generationContext) {

if (MongoAotPredicates.IS_SIMPLE_TYPE.test(type.toClass())) {
return;
}

super.contributeType(type, generationContext);
lazyLoadingProxyAotProcessor.registerLazyLoadingProxyIfNeeded(type.toClass(), generationContext);
}
}
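Because isMatch above also accepts any bean whose type is assignable to MongoManagedTypes, a user-declared holder bean would be picked up by this AOT processor as well. A hedged configuration sketch (the configuration class and entity types are illustrative, not part of this change set):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoManagedTypes;

@Configuration
class ManagedTypesConfigSketch {

	@Bean
	MongoManagedTypes mongoManagedTypes() {
		// Person and Order are placeholder entity classes.
		return MongoManagedTypes.from(Person.class, Order.class);
	}

	static class Person {}
	static class Order {}
}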
@@ -0,0 +1,83 @@
|
||||
/*
|
||||
* Copyright 2022-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.aot;
|
||||
|
||||
import static org.springframework.data.mongodb.aot.MongoAotPredicates.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.springframework.aot.hint.MemberCategory;
|
||||
import org.springframework.aot.hint.RuntimeHints;
|
||||
import org.springframework.aot.hint.RuntimeHintsRegistrar;
|
||||
import org.springframework.aot.hint.TypeReference;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* {@link RuntimeHintsRegistrar} for repository types and entity callbacks.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 4.0
|
||||
*/
|
||||
class MongoRuntimeHints implements RuntimeHintsRegistrar {
|
||||
|
||||
@Override
|
||||
public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
|
||||
|
||||
hints.reflection().registerTypes(
|
||||
Arrays.asList(TypeReference.of(BeforeConvertCallback.class), TypeReference.of(BeforeSaveCallback.class),
|
||||
TypeReference.of(AfterConvertCallback.class), TypeReference.of(AfterSaveCallback.class)),
|
||||
builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS,
|
||||
MemberCategory.INVOKE_PUBLIC_METHODS));
|
||||
|
||||
registerTransactionProxyHints(hints, classLoader);
|
||||
|
||||
if (isReactorPresent()) {
|
||||
|
||||
hints.reflection()
|
||||
.registerTypes(Arrays.asList(TypeReference.of(ReactiveBeforeConvertCallback.class),
|
||||
TypeReference.of(ReactiveBeforeSaveCallback.class), TypeReference.of(ReactiveAfterConvertCallback.class),
|
||||
TypeReference.of(ReactiveAfterSaveCallback.class)),
|
||||
builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS,
|
||||
MemberCategory.INVOKE_PUBLIC_METHODS));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static void registerTransactionProxyHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
|
||||
|
||||
if (MongoAotPredicates.isSyncClientPresent(classLoader)
|
||||
&& ClassUtils.isPresent("org.springframework.aop.SpringProxy", classLoader)) {
|
||||
|
||||
hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoDatabase"),
|
||||
TypeReference.of("org.springframework.aop.SpringProxy"),
|
||||
TypeReference.of("org.springframework.core.DecoratingProxy"));
|
||||
hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoCollection"),
|
||||
TypeReference.of("org.springframework.aop.SpringProxy"),
|
||||
TypeReference.of("org.springframework.core.DecoratingProxy"));
|
||||
}
|
||||
}
|
||||
|
||||
}
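As a point of reference, a minimal sketch (an assumption, not part of this change set) of the kind of entity callback bean that the reflection hints above keep invokable in a native image; Person is a placeholder domain type:

import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
import org.springframework.stereotype.Component;

@Component
class PersonBeforeConvertCallback implements BeforeConvertCallback<Person> {

	@Override
	public Person onBeforeConvert(Person entity, String collection) {
		// invoked before the entity is converted to a Document; the hints registered
		// above keep this callback reflectively reachable after native compilation
		return entity;
	}
}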
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2020 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -25,9 +25,7 @@ import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
@@ -80,30 +78,12 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio
|
||||
return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration
|
||||
* class (the concrete class, not this one) by default. So if you have a {@code com.acme.AppConfig} extending
|
||||
* {@link AbstractMongoClientConfiguration} the base package will be considered {@code com.acme} unless the method is
|
||||
* overridden to implement alternate behavior.
|
||||
*
|
||||
* @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for
|
||||
* entities.
|
||||
* @deprecated use {@link #getMappingBasePackages()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@Nullable
|
||||
protected String getMappingBasePackage() {
|
||||
|
||||
Package mappingBasePackage = getClass().getPackage();
|
||||
return mappingBasePackage == null ? null : mappingBasePackage.getName();
|
||||
}
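To illustrate the base-package derivation described above, a hedged sketch of a concrete configuration class; the com.acme package, database name and connection string are assumptions:

package com.acme;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

@Configuration
class AppConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "acme"; // assumed database name
	}

	@Override
	public MongoClient mongoClient() {
		return MongoClients.create("mongodb://localhost:27017"); // assumed connection string
	}

	// no getMappingBasePackages() override needed: entities in com.acme are scanned
	// because this concrete configuration class lives in that package
}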
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
|
||||
* {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
|
||||
* {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext(MongoCustomConversions)
|
||||
* @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)
|
||||
* @see #mongoDbFactory()
|
||||
*/
|
||||
@Bean
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2016-2020 the original author or authors.
|
||||
* Copyright 2016-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -84,10 +84,10 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and
|
||||
* {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
|
||||
* {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext(MongoCustomConversions)
|
||||
* @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)
|
||||
* @see #reactiveMongoDbFactory()
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
* Copyright 2011-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
* Copyright 2019-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -30,10 +30,6 @@ import com.mongodb.ConnectionString;
|
||||
*/
|
||||
public class ConnectionStringPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String connectionString) {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2020 the original author or authors.
|
||||
* Copyright 2013-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
* Copyright 2020-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2015-2020 the original author or authors.
|
||||
* Copyright 2015-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2020 the original author or authors.
|
||||
* Copyright 2013-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -34,10 +34,6 @@ import org.w3c.dom.Element;
|
||||
*/
|
||||
class GridFsTemplateParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
|
||||
*/
|
||||
@Override
|
||||
protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
|
||||
throws BeanDefinitionStoreException {
|
||||
@@ -46,10 +42,6 @@ class GridFsTemplateParser extends AbstractBeanDefinitionParser {
|
||||
return StringUtils.hasText(id) ? id : BeanNames.GRID_FS_TEMPLATE_BEAN_NAME;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
|
||||
*/
|
||||
@Override
|
||||
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
* Copyright 2011-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -24,7 +24,6 @@ import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.beans.BeanMetadataElement;
|
||||
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.config.BeanDefinitionHolder;
|
||||
import org.springframework.beans.factory.config.RuntimeBeanReference;
|
||||
@@ -64,6 +63,7 @@ import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.springframework.util.xml.DomUtils;
|
||||
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
/**
|
||||
@@ -80,7 +80,7 @@ import org.w3c.dom.Element;
|
||||
public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
|
||||
private static final String BASE_PACKAGE = "base-package";
|
||||
private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("javax.validation.Validator",
|
||||
private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("jakarta.validation.Validator",
|
||||
MappingMongoConverterParser.class.getClassLoader());
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -135,9 +135,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
new BeanComponentDefinition(indexOperationsProviderBuilder.getBeanDefinition(), "indexOperationsProvider"));
|
||||
}
|
||||
|
||||
try {
|
||||
registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME);
|
||||
} catch (NoSuchBeanDefinitionException ignored) {
|
||||
if (!registry.containsBeanDefinition(INDEX_HELPER_BEAN_NAME)) {
|
||||
|
||||
BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(MongoPersistentEntityIndexCreator.class);
|
||||
@@ -151,7 +149,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
|
||||
BeanDefinition validatingMongoEventListener = potentiallyCreateValidatingMongoEventListener(element, parserContext);
|
||||
|
||||
if (validatingMongoEventListener != null) {
|
||||
if (validatingMongoEventListener != null && !registry.containsBeanDefinition(VALIDATING_EVENT_LISTENER_BEAN_NAME)) {
|
||||
parserContext.registerBeanComponent(
|
||||
new BeanComponentDefinition(validatingMongoEventListener, VALIDATING_EVENT_LISTENER_BEAN_NAME));
|
||||
}
|
||||
@@ -165,15 +163,16 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
private BeanDefinition potentiallyCreateValidatingMongoEventListener(Element element, ParserContext parserContext) {
|
||||
|
||||
String disableValidation = element.getAttribute("disable-validation");
|
||||
boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.valueOf(disableValidation);
|
||||
boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.parseBoolean(disableValidation);
|
||||
|
||||
if (!validationDisabled) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition();
|
||||
RuntimeBeanReference validator = getValidator(builder, parserContext);
|
||||
RuntimeBeanReference validator = getValidator(element, parserContext);
|
||||
|
||||
if (validator != null) {
|
||||
builder.getRawBeanDefinition().setBeanClass(ValidatingMongoEventListener.class);
|
||||
builder.getRawBeanDefinition().setSource(element);
|
||||
builder.addConstructorArgValue(validator);
|
||||
|
||||
return builder.getBeanDefinition();
|
||||
@@ -195,7 +194,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
validatorDef.setSource(source);
|
||||
validatorDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
|
||||
String validatorName = parserContext.getReaderContext().registerWithGeneratedName(validatorDef);
|
||||
parserContext.registerBeanComponent(new BeanComponentDefinition(validatorDef, validatorName));
|
||||
|
||||
return new RuntimeBeanReference(validatorName);
|
||||
}
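For comparison with the XML wiring above, the same validating listener is commonly declared in Java configuration — a minimal sketch using the standard LocalValidatorFactoryBean:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;

@Configuration
class ValidationConfig {

	@Bean
	LocalValidatorFactoryBean validator() {
		return new LocalValidatorFactoryBean();
	}

	@Bean
	ValidatingMongoEventListener validatingMongoEventListener(LocalValidatorFactoryBean validator) {
		// applies Bean Validation before entities are persisted, which is what the
		// disable-validation aware parsing above wires up through the XML namespace
		return new ValidatingMongoEventListener(validator);
	}
}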
|
||||
@@ -255,7 +253,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
&& Boolean.parseBoolean(abbreviateFieldNames);
|
||||
|
||||
if (fieldNamingStrategyReferenced && abbreviationActivated) {
|
||||
context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured!",
|
||||
context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured",
|
||||
element);
|
||||
return;
|
||||
}
|
||||
@@ -376,10 +374,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
this.delegates = new HashSet<>(Arrays.asList(filters));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.type.filter.TypeFilter#match(org.springframework.core.type.classreading.MetadataReader, org.springframework.core.type.classreading.MetadataReaderFactory)
|
||||
*/
|
||||
public boolean match(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory)
|
||||
throws IOException {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012-2020 the original author or authors.
|
||||
* Copyright 2012-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -47,28 +47,16 @@ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinit
|
||||
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
|
||||
MongoAuditingRegistrar.class.getClassLoader());
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element)
|
||||
*/
|
||||
@Override
|
||||
protected Class<?> getBeanClass(Element element) {
|
||||
return AuditingEntityCallback.class;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#shouldGenerateId()
|
||||
*/
|
||||
@Override
|
||||
protected boolean shouldGenerateId() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder)
|
||||
*/
|
||||
@Override
|
||||
protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2020 the original author or authors.
|
||||
* Copyright 2013-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,11 +18,10 @@ package org.springframework.data.mongodb.config;
|
||||
import java.lang.annotation.Annotation;
|
||||
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
|
||||
import org.springframework.core.type.AnnotationMetadata;
|
||||
import org.springframework.core.Ordered;
|
||||
import org.springframework.data.auditing.IsNewAwareAuditingHandler;
|
||||
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
|
||||
import org.springframework.data.auditing.config.AuditingConfiguration;
|
||||
@@ -36,68 +35,42 @@ import org.springframework.util.Assert;
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport implements Ordered {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
|
||||
*/
|
||||
@Override
|
||||
protected Class<? extends Annotation> getAnnotation() {
|
||||
return EnableMongoAuditing.class;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName()
|
||||
*/
|
||||
@Override
|
||||
protected String getAuditingHandlerBeanName() {
|
||||
return "mongoAuditingHandler";
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerBeanDefinitions(org.springframework.core.type.AnnotationMetadata, org.springframework.beans.factory.support.BeanDefinitionRegistry)
|
||||
*/
|
||||
@Override
|
||||
public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry registry) {
|
||||
protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration,
|
||||
BeanDefinitionRegistry registry) {
|
||||
|
||||
Assert.notNull(annotationMetadata, "AnnotationMetadata must not be null!");
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
|
||||
super.registerBeanDefinitions(annotationMetadata, registry);
|
||||
builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration)
|
||||
*/
|
||||
@Override
|
||||
protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) {
|
||||
|
||||
Assert.notNull(configuration, "AuditingConfiguration must not be null!");
|
||||
Assert.notNull(configuration, "AuditingConfiguration must not be null");
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class);
|
||||
|
||||
BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class);
|
||||
definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);
|
||||
|
||||
builder.addConstructorArgValue(definition.getBeanDefinition());
|
||||
return configureDefaultAuditHandlerAttributes(configuration, builder);
|
||||
return configureDefaultAuditHandlerAttributes(configuration,
|
||||
BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry)
|
||||
*/
|
||||
@Override
|
||||
protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition,
|
||||
BeanDefinitionRegistry registry) {
|
||||
|
||||
Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!");
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null");
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null");
|
||||
|
||||
BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder
|
||||
.rootBeanDefinition(AuditingEntityCallback.class);
|
||||
@@ -108,4 +81,8 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
AuditingEntityCallback.class.getName(), registry);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getOrder() {
|
||||
return Ordered.LOWEST_PRECEDENCE;
|
||||
}
|
||||
}
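For reference, the registrar above is activated through the annotation it reads; a minimal sketch of enabling Mongo auditing in Java configuration:

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.EnableMongoAuditing;

@Configuration
@EnableMongoAuditing
class AuditingConfig {
	// importing EnableMongoAuditing triggers MongoAuditingRegistrar, which registers the
	// IsNewAwareAuditingHandler and AuditingEntityCallback bean definitions shown above
}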
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2015-2020 the original author or authors.
|
||||
* Copyright 2015-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -35,10 +35,6 @@ import org.w3c.dom.Element;
|
||||
*/
|
||||
public class MongoClientParser implements BeanDefinitionParser {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
|
||||
*/
|
||||
public BeanDefinition parse(Element element, ParserContext parserContext) {
|
||||
|
||||
Object source = parserContext.extractSource(element);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2016-2020 the original author or authors.
|
||||
* Copyright 2016-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,11 +26,11 @@ import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.type.filter.AnnotationTypeFilter;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.MongoManagedTypes;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
@@ -77,14 +77,13 @@ public abstract class MongoConfigurationSupport {
|
||||
*
|
||||
* @see #getMappingBasePackages()
|
||||
* @return
|
||||
* @throws ClassNotFoundException
|
||||
*/
|
||||
@Bean
|
||||
public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions)
|
||||
throws ClassNotFoundException {
|
||||
public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions,
|
||||
MongoManagedTypes mongoManagedTypes) {
|
||||
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
mappingContext.setInitialEntitySet(getInitialEntitySet());
|
||||
mappingContext.setManagedTypes(mongoManagedTypes);
|
||||
mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder());
|
||||
mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
|
||||
mappingContext.setAutoIndexCreation(autoIndexCreation());
|
||||
@@ -92,6 +91,16 @@ public abstract class MongoConfigurationSupport {
|
||||
return mappingContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return new instance of {@link MongoManagedTypes}.
|
||||
* @throws ClassNotFoundException
|
||||
* @since 4.0
|
||||
*/
|
||||
@Bean
|
||||
public MongoManagedTypes mongoManagedTypes() throws ClassNotFoundException {
|
||||
return MongoManagedTypes.fromIterable(getInitialEntitySet());
|
||||
}
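Where classpath scanning is undesirable (for instance to keep the AOT-relevant type set explicit), the bean above could be overridden to pin the managed types directly — a hedged sketch, with Person and Order as placeholder entity classes:

@Bean
@Override
public MongoManagedTypes mongoManagedTypes() {
	// bypasses getInitialEntitySet() scanning and declares the entity set up front,
	// using the same fromIterable factory as the default implementation above
	return MongoManagedTypes.fromIterable(java.util.Set.of(Person.class, Order.class));
}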
|
||||
|
||||
/**
|
||||
* Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
|
||||
* {@link CustomConversions} will be registered with the
|
||||
@@ -140,8 +149,7 @@ public abstract class MongoConfigurationSupport {
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and
|
||||
* {@link Persistent}.
|
||||
* Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document}.
|
||||
*
|
||||
* @param basePackage must not be {@literal null}.
|
||||
* @return
|
||||
@@ -161,7 +169,6 @@ public abstract class MongoConfigurationSupport {
|
||||
ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(
|
||||
false);
|
||||
componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class));
|
||||
componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class));
|
||||
|
||||
for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) {
|
||||
|
||||
@@ -175,8 +182,7 @@ public abstract class MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Configures whether to abbreviate field names for domain objects by configuring a
|
||||
* {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced
|
||||
* customization needs, consider overriding {@link #mappingMongoConverter()}.
|
||||
* {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@@ -200,7 +206,7 @@ public abstract class MongoConfigurationSupport {
|
||||
* {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
|
||||
*
|
||||
* @return {@literal false} by default. <br />
|
||||
* <strong>INFO</strong>: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
|
||||
* <strong>INFO:</strong> As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
|
||||
* @since 2.2
|
||||
*/
|
||||
protected boolean autoIndexCreation() {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2015-2020 the original author or authors.
|
||||
* Copyright 2015-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -51,10 +51,6 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
private static final String OPTIONS_DELIMITER = "?";
|
||||
private static final String OPTION_VALUE_DELIMITER = "&";
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String text) throws IllegalArgumentException {
|
||||
|
||||
@@ -121,7 +117,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism));
|
||||
String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'", authMechanism));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -198,7 +194,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
String[] optionArgs = option.split("=");
|
||||
|
||||
if (optionArgs.length == 1) {
|
||||
throw new IllegalArgumentException(String.format("Query parameter '%s' has no value!", optionArgs[0]));
|
||||
throw new IllegalArgumentException(String.format("Query parameter '%s' has no value", optionArgs[0]));
|
||||
}
|
||||
|
||||
properties.put(optionArgs[0], optionArgs[1]);
|
||||
@@ -213,21 +209,21 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
if (source.length != 2) {
|
||||
throw new IllegalArgumentException(
|
||||
"Credentials need to specify username and password like in 'username:password@database'!");
|
||||
"Credentials need to specify username and password like in 'username:password@database'");
|
||||
}
|
||||
}
|
||||
|
||||
private static void verifyDatabasePresent(String source) {
|
||||
|
||||
if (!StringUtils.hasText(source)) {
|
||||
throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'!");
|
||||
throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'");
|
||||
}
|
||||
}
|
||||
|
||||
private static void verifyUserNamePresent(String[] source) {
|
||||
|
||||
if (source.length == 0 || !StringUtils.hasText(source[0])) {
|
||||
throw new IllegalArgumentException("Credentials need to specify username!");
|
||||
throw new IllegalArgumentException("Credentials need to specify username");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -235,7 +231,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
try {
|
||||
return URLDecoder.decode(it, "UTF-8");
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
throw new IllegalArgumentException("o_O UTF-8 not supported!", e);
|
||||
throw new IllegalArgumentException("o_O UTF-8 not supported", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
* Copyright 2011-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -62,10 +62,6 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Collections.unmodifiableSet(mongoUriAllowedAdditionalAttributes);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
|
||||
*/
|
||||
@Override
|
||||
protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
|
||||
throws BeanDefinitionStoreException {
|
||||
@@ -74,10 +70,6 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
return StringUtils.hasText(id) ? id : BeanNames.DB_FACTORY_BEAN_NAME;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
|
||||
*/
|
||||
@Override
|
||||
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
|
||||
|
||||
@@ -171,7 +163,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
if (element.getAttributes().getLength() > allowedAttributesCount) {
|
||||
|
||||
parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually!",
|
||||
parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually",
|
||||
parserContext.extractSource(element));
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
* Copyright 2011-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
* Copyright 2011-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,10 +26,6 @@ import org.springframework.beans.factory.xml.NamespaceHandlerSupport;
|
||||
*/
|
||||
public class MongoNamespaceHandler extends NamespaceHandlerSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.NamespaceHandler#init()
|
||||
*/
|
||||
public void init() {
|
||||
|
||||
registerBeanDefinitionParser("mapping-converter", new MappingMongoConverterParser());
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
* Copyright 2011-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,9 +22,12 @@ import java.util.Map;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.config.CustomEditorConfigurer;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionValidationException;
|
||||
import org.springframework.beans.factory.support.ManagedMap;
|
||||
import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoServerApiFactoryBean;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.springframework.util.xml.DomUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
@@ -37,7 +40,6 @@ import org.w3c.dom.Element;
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
abstract class MongoParsingUtils {
|
||||
|
||||
private MongoParsingUtils() {}
|
||||
@@ -45,7 +47,7 @@ abstract class MongoParsingUtils {
|
||||
/**
|
||||
* Parses the {@code mongo:client-settings} sub-element. Populates the given attribute factory with the proper
|
||||
* attributes.
|
||||
*
|
||||
*
|
||||
* @param element
|
||||
* @param mongoClientBuilder
|
||||
* @return
|
||||
@@ -112,6 +114,20 @@ abstract class MongoParsingUtils {
|
||||
// Field level encryption
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
|
||||
// ServerAPI
|
||||
if (StringUtils.hasText(settingsElement.getAttribute("server-api-version"))) {
|
||||
|
||||
MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean();
|
||||
serverApiFactoryBean.setVersion(settingsElement.getAttribute("server-api-version"));
|
||||
try {
|
||||
clientOptionsDefBuilder.addPropertyValue("serverApi", serverApiFactoryBean.getObject());
|
||||
} catch (Exception exception) {
|
||||
throw new BeanDefinitionValidationException("Non-parsable server-api", exception);
|
||||
}
|
||||
} else {
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "server-api-ref", "serverApi");
|
||||
}
|
||||
|
||||
// and the rest
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition());
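The server-api-version handling above can also be exercised programmatically — a hedged sketch; the version string "1" is an assumed attribute value:

MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean();
serverApiFactoryBean.setVersion("1"); // the value an XML server-api-version attribute would carry
try {
	ServerApi serverApi = serverApiFactoryBean.getObject(); // com.mongodb.ServerApi
	// the ServerApi instance is what ends up on the MongoClientSettings bean definition
} catch (Exception exception) {
	throw new BeanDefinitionValidationException("Non-parsable server-api", exception);
}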
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
* Copyright 2011-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -39,10 +39,6 @@ import org.w3c.dom.Element;
|
||||
*/
|
||||
class MongoTemplateParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
|
||||
*/
|
||||
@Override
|
||||
protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
|
||||
throws BeanDefinitionStoreException {
|
||||
@@ -51,10 +47,6 @@ class MongoTemplateParser extends AbstractBeanDefinitionParser {
|
||||
return StringUtils.hasText(id) ? id : BeanNames.MONGO_TEMPLATE_BEAN_NAME;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
|
||||
*/
|
||||
@Override
|
||||
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
* Copyright 2020-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,7 +28,7 @@ import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
* @author Christoph Strobl
|
||||
* @since 3.1
|
||||
*/
|
||||
class PersistentEntitiesFactoryBean implements FactoryBean<PersistentEntities> {
|
||||
public class PersistentEntitiesFactoryBean implements FactoryBean<PersistentEntities> {
|
||||
|
||||
private final MappingMongoConverter converter;
|
||||
|
||||
@@ -41,19 +41,11 @@ class PersistentEntitiesFactoryBean implements FactoryBean<PersistentEntities> {
|
||||
this.converter = converter;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObject()
|
||||
*/
|
||||
@Override
|
||||
public PersistentEntities getObject() {
|
||||
return PersistentEntities.of(converter.getMappingContext());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return PersistentEntities.class;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
* Copyright 2020-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,11 +18,9 @@ package org.springframework.data.mongodb.config;
|
||||
import java.lang.annotation.Annotation;
|
||||
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
|
||||
import org.springframework.core.type.AnnotationMetadata;
|
||||
import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler;
|
||||
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
|
||||
import org.springframework.data.auditing.config.AuditingConfiguration;
|
||||
@@ -34,56 +32,42 @@ import org.springframework.util.Assert;
|
||||
* {@link ImportBeanDefinitionRegistrar} to enable {@link EnableReactiveMongoAuditing} annotation.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 3.1
|
||||
*/
|
||||
class ReactiveMongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
|
||||
*/
|
||||
@Override
|
||||
protected Class<? extends Annotation> getAnnotation() {
|
||||
return EnableReactiveMongoAuditing.class;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName()
|
||||
*/
|
||||
@Override
|
||||
protected String getAuditingHandlerBeanName() {
|
||||
return "reactiveMongoAuditingHandler";
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration)
|
||||
*/
|
||||
@Override
|
||||
protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration,
|
||||
BeanDefinitionRegistry registry) {
|
||||
builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) {
|
||||
|
||||
Assert.notNull(configuration, "AuditingConfiguration must not be null!");
|
||||
Assert.notNull(configuration, "AuditingConfiguration must not be null");
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class);
|
||||
|
||||
BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class);
|
||||
definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);
|
||||
|
||||
builder.addConstructorArgValue(definition.getBeanDefinition());
|
||||
return configureDefaultAuditHandlerAttributes(configuration, builder);
|
||||
return configureDefaultAuditHandlerAttributes(configuration,
|
||||
BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry)
|
||||
*/
|
||||
@Override
|
||||
protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition,
|
||||
BeanDefinitionRegistry registry) {
|
||||
|
||||
Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!");
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null");
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null");
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
* Copyright 2019-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -32,10 +32,6 @@ import com.mongodb.ReadConcernLevel;
|
||||
*/
|
||||
public class ReadConcernPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String readConcernString) {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2015-2020 the original author or authors.
|
||||
* Copyright 2015-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -29,10 +29,6 @@ import com.mongodb.ReadPreference;
|
||||
*/
|
||||
public class ReadPreferencePropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String readPreferenceString) throws IllegalArgumentException {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
* Copyright 2011-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -21,8 +21,8 @@ import java.net.UnknownHostException;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
@@ -43,13 +43,9 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
* A port is a number without a leading 0 at the end of the address that is preceded by just a single :.
|
||||
*/
|
||||
private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";
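A short illustration of how that split pattern behaves (example inputs are assumptions, not part of the original source):

// splits only on the single trailing ":port" and leaves IPv6 separators alone
//   "localhost:27017".split(HOST_PORT_SPLIT_PATTERN) -> ["localhost", "27017"]
//   "[::1]:27017".split(HOST_PORT_SPLIT_PATTERN)     -> ["[::1]", "27017"]
//   "::1".split(HOST_PORT_SPLIT_PATTERN)             -> ["::1"]   (bare IPv6 address, no port to split off)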
|
||||
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address {} '{}'. Check your replica set configuration!";
|
||||
private static final Logger LOG = LoggerFactory.getLogger(ServerAddressPropertyEditor.class);
|
||||
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address %s '%s'; Check your replica set configuration";
|
||||
private static final Log LOG = LogFactory.getLog(ServerAddressPropertyEditor.class);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String replicaSetString) {
|
||||
|
||||
@@ -72,7 +68,7 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
if (serverAddresses.isEmpty()) {
|
||||
throw new IllegalArgumentException(
|
||||
"Could not resolve at least one server of the replica set configuration! Validate your config!");
|
||||
"Could not resolve at least one server of the replica set configuration; Validate your config");
|
||||
}
|
||||
|
||||
setValue(serverAddresses.toArray(new ServerAddress[serverAddresses.size()]));
|
||||
@@ -88,14 +84,18 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
private ServerAddress parseServerAddress(String source) {
|
||||
|
||||
if (!StringUtils.hasText(source)) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
|
||||
if (LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
String[] hostAndPort = extractHostAddressAndPort(source.trim());
|
||||
|
||||
if (hostAndPort.length > 2) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
|
||||
if (LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -105,9 +105,13 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
return port == null ? new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port);
|
||||
} catch (UnknownHostException e) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]);
|
||||
if (LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]));
|
||||
}
|
||||
} catch (NumberFormatException e) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]);
|
||||
if (LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]));
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
@@ -121,7 +125,7 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
*/
|
||||
private String[] extractHostAddressAndPort(String addressAndPortSource) {
|
||||
|
||||
Assert.notNull(addressAndPortSource, "Address and port source must not be null!");
|
||||
Assert.notNull(addressAndPortSource, "Address and port source must not be null");
|
||||
|
||||
String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN);
|
||||
String hostAddress = hostAndPort[0];
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012-2020 the original author or authors.
|
||||
* Copyright 2012-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,10 +26,6 @@ import com.mongodb.WriteConcern;
|
||||
*/
|
||||
public class StringToWriteConcernConverter implements Converter<String, WriteConcern> {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
public WriteConcern convert(String source) {
|
||||
|
||||
WriteConcern writeConcern = WriteConcern.valueOf(source);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
* Copyright 2020-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -29,10 +29,6 @@ import org.springframework.util.StringUtils;
|
||||
*/
|
||||
public class UUidRepresentationPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String value) {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
* Copyright 2011-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2020 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,8 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
@@ -24,21 +22,16 @@ import java.util.stream.Collectors;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.CountOperation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions.DomainTypeMapping;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Utility methods to map {@link org.springframework.data.mongodb.core.aggregation.Aggregation} pipeline definitions and
|
||||
@@ -52,41 +45,46 @@ class AggregationUtil {
|
||||
|
||||
QueryMapper queryMapper;
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
Lazy<AggregationOperationContext> untypedMappingContext;
|
||||
|
||||
AggregationUtil(QueryMapper queryMapper,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
this.queryMapper = queryMapper;
|
||||
this.mappingContext = mappingContext;
|
||||
this.untypedMappingContext = Lazy
|
||||
.of(() -> new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper));
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepare the {@link AggregationOperationContext} for a given aggregation by either returning the context itself if it
|
||||
* is not {@literal null}, create a {@link TypeBasedAggregationOperationContext} if the aggregation contains type
|
||||
* information (is a {@link TypedAggregation}) or use the {@link Aggregation#DEFAULT_CONTEXT}.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @param context can be {@literal null}.
|
||||
* @return the root {@link AggregationOperationContext} to use.
|
||||
*/
|
||||
AggregationOperationContext prepareAggregationContext(Aggregation aggregation,
|
||||
@Nullable AggregationOperationContext context) {
|
||||
AggregationOperationContext createAggregationContext(Aggregation aggregation, @Nullable Class<?> inputType) {
|
||||
|
||||
if (context != null) {
|
||||
return context;
|
||||
}
|
||||
DomainTypeMapping domainTypeMapping = aggregation.getOptions().getDomainTypeMapping();
|
||||
|
||||
if (!(aggregation instanceof TypedAggregation)) {
|
||||
if (domainTypeMapping == DomainTypeMapping.NONE) {
|
||||
return Aggregation.DEFAULT_CONTEXT;
|
||||
}
|
||||
|
||||
Class<?> inputType = ((TypedAggregation) aggregation).getInputType();
|
||||
if (!(aggregation instanceof TypedAggregation)) {
|
||||
|
||||
if (inputType == null) {
|
||||
return untypedMappingContext.get();
|
||||
}
|
||||
|
||||
if (domainTypeMapping == DomainTypeMapping.STRICT
|
||||
&& !aggregation.getPipeline().containsUnionWith()) {
|
||||
return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
}
|
||||
|
||||
if (aggregation.getPipeline().containsUnionWith()) {
|
||||
return new RelaxedTypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
}
|
||||
|
||||
return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
inputType = ((TypedAggregation<?>) aggregation).getInputType();
|
||||
if (domainTypeMapping == DomainTypeMapping.STRICT
|
||||
&& !aggregation.getPipeline().containsUnionWith()) {
|
||||
return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
}
|
||||
|
||||
return new RelaxedTypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
}
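In summary, the context selection above resolves as follows (an illustrative restatement of the branches, not additional behavior):

// options.getDomainTypeMapping() == DomainTypeMapping.NONE -> Aggregation.DEFAULT_CONTEXT
// untyped aggregation without an input type                -> lazily created relaxed Object-based context
// STRICT and the pipeline contains no $unionWith stage     -> TypeBasedAggregationOperationContext(inputType, ...)
// any other combination                                    -> RelaxedTypeBasedAggregationOperationContext(inputType, ...)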
|
||||
|
||||
/**
|
||||
@@ -97,12 +95,7 @@ class AggregationUtil {
|
||||
* @return
|
||||
*/
|
||||
List<Document> createPipeline(Aggregation aggregation, AggregationOperationContext context) {
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
|
||||
return aggregation.toPipeline(context);
|
||||
}
|
||||
|
||||
return mapAggregationPipeline(aggregation.toPipeline(context));
|
||||
return aggregation.toPipeline(context);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -113,63 +106,7 @@ class AggregationUtil {
|
||||
* @return
|
||||
*/
|
||||
Document createCommand(String collection, Aggregation aggregation, AggregationOperationContext context) {
|
||||
|
||||
Document command = aggregation.toDocument(collection, context);
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
|
||||
return command;
|
||||
}
|
||||
|
||||
command.put("pipeline", mapAggregationPipeline(command.get("pipeline", List.class)));
|
||||
|
||||
return command;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@code $count} aggregation for {@link Query} and optionally a {@link Class entity class}.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param entityClass can be {@literal null} if the {@link Query} object is empty.
|
||||
* @return the {@link Aggregation} pipeline definition to run a {@code $count} aggregation.
|
||||
*/
|
||||
Aggregation createCountAggregation(Query query, @Nullable Class<?> entityClass) {
|
||||
|
||||
List<AggregationOperation> pipeline = computeCountAggregationPipeline(query, entityClass);
|
||||
|
||||
Aggregation aggregation = entityClass != null ? Aggregation.newAggregation(entityClass, pipeline)
|
||||
: Aggregation.newAggregation(pipeline);
|
||||
aggregation.withOptions(AggregationOptions.builder().collation(query.getCollation().orElse(null)).build());
|
||||
|
||||
return aggregation;
|
||||
}
|
||||
|
||||
private List<AggregationOperation> computeCountAggregationPipeline(Query query, @Nullable Class<?> entityType) {
|
||||
|
||||
CountOperation count = Aggregation.count().as("totalEntityCount");
|
||||
if (query.getQueryObject().isEmpty()) {
|
||||
return Collections.singletonList(count);
|
||||
}
|
||||
|
||||
Assert.notNull(entityType, "Entity type must not be null!");
|
||||
|
||||
Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(),
|
||||
mappingContext.getPersistentEntity(entityType));
|
||||
|
||||
CriteriaDefinition criteria = new CriteriaDefinition() {
|
||||
|
||||
@Override
|
||||
public Document getCriteriaObject() {
|
||||
return mappedQuery;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public String getKey() {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
return Arrays.asList(Aggregation.match(criteria), count);
|
||||
return aggregation.toDocument(collection, context);
|
||||
}
|
||||
|
||||
private List<Document> mapAggregationPipeline(List<Document> pipeline) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2015-2020 the original author or authors.
|
||||
* Copyright 2015-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,16 +19,30 @@ import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.util.Pair;
|
||||
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
|
||||
/**
|
||||
* Bulk operations for insert/update/remove actions on a collection. These bulks operation are available since MongoDB
|
||||
* 2.6 and make use of low level bulk commands on the protocol level. This interface defines a fluent API to add
|
||||
* multiple single operations or list of similar operations in sequence which can then eventually be executed by calling
|
||||
* Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and
|
||||
* make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single
|
||||
* operations or list of similar operations in sequence which can then eventually be executed by calling
|
||||
* {@link #execute()}.
|
||||
*
|
||||
* <pre class="code">
|
||||
* MongoOperations ops = …;
|
||||
*
|
||||
* ops.bulkOps(BulkMode.UNORDERED, Person.class)
|
||||
* .insert(newPerson)
|
||||
* .updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
|
||||
* .execute();
|
||||
* </pre>
|
||||
* <p>
|
||||
* Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations
|
||||
* that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and
|
||||
* the version field remains not populated.
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @author Minsu Kim
|
||||
@@ -71,7 +85,19 @@ public interface BulkOperations {
|
||||
* @param update {@link Update} operation to perform, must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations updateOne(Query query, Update update);
|
||||
default BulkOperations updateOne(Query query, Update update) {
|
||||
return updateOne(query, (UpdateDefinition) update);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a single update to the bulk operation. For the update request, only the first matching document is updated.
|
||||
*
|
||||
* @param query update criteria, must not be {@literal null}.
|
||||
* @param update {@link Update} operation to perform, must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
* @since 4.1
|
||||
*/
|
||||
BulkOperations updateOne(Query query, UpdateDefinition update);
|
||||
|
||||
/**
|
||||
* Add a list of updates to the bulk operation. For each update request, only the first matching document is updated.
|
||||
@@ -79,7 +105,7 @@ public interface BulkOperations {
|
||||
* @param updates Update operations to perform.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations updateOne(List<Pair<Query, Update>> updates);
|
||||
BulkOperations updateOne(List<Pair<Query, UpdateDefinition>> updates);
|
||||
|
||||
/**
|
||||
* Add a single update to the bulk operation. For the update request, all matching documents are updated.
|
||||
@@ -88,7 +114,19 @@ public interface BulkOperations {
|
||||
* @param update Update operation to perform.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations updateMulti(Query query, Update update);
|
||||
default BulkOperations updateMulti(Query query, Update update) {
|
||||
return updateMulti(query, (UpdateDefinition) update);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a single update to the bulk operation. For the update request, all matching documents are updated.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param update Update operation to perform.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
* @since 4.1
|
||||
*/
|
||||
BulkOperations updateMulti(Query query, UpdateDefinition update);
|
||||
|
||||
/**
|
||||
* Add a list of updates to the bulk operation. For each update request, all matching documents are updated.
|
||||
@@ -96,7 +134,7 @@ public interface BulkOperations {
|
||||
* @param updates Update operations to perform.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations updateMulti(List<Pair<Query, Update>> updates);
|
||||
BulkOperations updateMulti(List<Pair<Query, UpdateDefinition>> updates);
|
||||
|
||||
/**
|
||||
* Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
|
||||
@@ -106,7 +144,20 @@ public interface BulkOperations {
|
||||
* @param update Update operation to perform.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
*/
|
||||
BulkOperations upsert(Query query, Update update);
|
||||
default BulkOperations upsert(Query query, Update update) {
|
||||
return upsert(query, (UpdateDefinition) update);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
|
||||
* else an insert.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param update Update operation to perform.
|
||||
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
|
||||
* @since 4.1
|
||||
*/
|
||||
BulkOperations upsert(Query query, UpdateDefinition update);
|
||||
|
||||
/**
|
||||
* Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty,
|
||||
@@ -138,7 +189,7 @@ public interface BulkOperations {
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default BulkOperations replaceOne(Query query, Object replacement) {
|
||||
@@ -151,7 +202,7 @@ public interface BulkOperations {
|
||||
* @param query Update criteria.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);
|
||||
|
||||
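As a usage sketch of the new {@code UpdateDefinition} overloads above, one bulk can mix a classic {@code Update} with an aggregation pipeline update. The {@code Person} type, the field names, and the injected {@code MongoOperations} are illustrative assumptions, not part of this change:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.bulk.BulkWriteResult;

class BulkUpdateExample {

	// Person is an illustrative mapped domain type.
	BulkWriteResult bulkUpdate(MongoOperations operations) {

		BulkOperations bulkOps = operations.bulkOps(BulkMode.UNORDERED, Person.class);

		// Classic Update; the new default method routes this to the UpdateDefinition overload.
		bulkOps.updateOne(query(where("firstname").is("Joe")), Update.update("lastname", "Doe"));

		// AggregationUpdate implements UpdateDefinition, so pipeline updates work in bulk as well.
		bulkOps.updateMulti(query(where("active").is(false)),
				AggregationUpdate.update().set("status").toValue("archived"));

		return bulkOps.execute();
	}
}
```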
@@ -0,0 +1,221 @@
|
||||
/*
|
||||
* Copyright 2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.context.ApplicationEvent;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.client.model.BulkWriteOptions;
|
||||
import com.mongodb.client.model.DeleteManyModel;
|
||||
import com.mongodb.client.model.DeleteOneModel;
|
||||
import com.mongodb.client.model.InsertOneModel;
|
||||
import com.mongodb.client.model.ReplaceOneModel;
|
||||
import com.mongodb.client.model.UpdateManyModel;
|
||||
import com.mongodb.client.model.UpdateOneModel;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
import com.mongodb.client.model.WriteModel;
|
||||
|
||||
/**
|
||||
* Support class for bulk operations.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 4.1
|
||||
*/
|
||||
abstract class BulkOperationsSupport {
|
||||
|
||||
private final String collectionName;
|
||||
|
||||
BulkOperationsSupport(String collectionName) {
|
||||
|
||||
Assert.hasText(collectionName, "CollectionName must not be null nor empty");
|
||||
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit a {@link BeforeSaveEvent}.
|
||||
*
|
||||
* @param holder
|
||||
*/
|
||||
void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.model() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName));
|
||||
} else if (holder.model() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit a {@link AfterSaveEvent}.
|
||||
*
|
||||
* @param holder
|
||||
*/
|
||||
void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.model() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName));
|
||||
} else if (holder.model() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
WriteModel<Document> mapWriteModel(Object source, WriteModel<Document> writeModel) {
|
||||
|
||||
if (writeModel instanceof UpdateOneModel<Document> model) {
|
||||
|
||||
if (source instanceof AggregationUpdate aggregationUpdate) {
|
||||
|
||||
List<Document> pipeline = mapUpdatePipeline(aggregationUpdate);
|
||||
return new UpdateOneModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions());
|
||||
}
|
||||
|
||||
return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
|
||||
model.getOptions());
|
||||
}
|
||||
|
||||
if (writeModel instanceof UpdateManyModel<Document> model) {
|
||||
|
||||
if (source instanceof AggregationUpdate aggregationUpdate) {
|
||||
|
||||
List<Document> pipeline = mapUpdatePipeline(aggregationUpdate);
|
||||
return new UpdateManyModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions());
|
||||
}
|
||||
|
||||
return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
|
||||
model.getOptions());
|
||||
}
|
||||
|
||||
if (writeModel instanceof DeleteOneModel<Document> model) {
|
||||
return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions());
|
||||
}
|
||||
|
||||
if (writeModel instanceof DeleteManyModel<Document> model) {
|
||||
return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions());
|
||||
}
|
||||
|
||||
return writeModel;
|
||||
}
|
||||
|
||||
private List<Document> mapUpdatePipeline(AggregationUpdate source) {
|
||||
|
||||
Class<?> type = entity().isPresent() ? entity().map(PersistentEntity::getType).get() : Object.class;
|
||||
AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type,
|
||||
updateMapper().getMappingContext(), queryMapper());
|
||||
|
||||
return new AggregationUtil(queryMapper(), queryMapper().getMappingContext()).createPipeline(source, context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit a {@link ApplicationEvent} if event multicasting is enabled.
|
||||
*
|
||||
* @param event
|
||||
*/
|
||||
protected abstract void maybeEmitEvent(ApplicationEvent event);
|
||||
|
||||
/**
|
||||
* @return the {@link UpdateMapper} to use.
|
||||
*/
|
||||
protected abstract UpdateMapper updateMapper();
|
||||
|
||||
/**
|
||||
* @return the {@link QueryMapper} to use.
|
||||
*/
|
||||
protected abstract QueryMapper queryMapper();
|
||||
|
||||
/**
|
||||
* @return the associated {@link PersistentEntity}. Can be {@link Optional#empty()}.
|
||||
*/
|
||||
protected abstract Optional<? extends MongoPersistentEntity<?>> entity();
|
||||
|
||||
protected Bson getMappedUpdate(Bson update) {
|
||||
return updateMapper().getMappedObject(update, entity());
|
||||
}
|
||||
|
||||
protected Bson getMappedQuery(Bson query) {
|
||||
return queryMapper().getMappedObject(query, entity());
|
||||
}
|
||||
|
||||
protected static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
|
||||
|
||||
BulkWriteOptions options = new BulkWriteOptions();
|
||||
|
||||
return switch (bulkMode) {
|
||||
case ORDERED -> options.ordered(true);
|
||||
case UNORDERED -> options.ordered(false);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
|
||||
* @param update The {@link Update} to apply
|
||||
* @param upsert flag to indicate if document should be upserted.
|
||||
* @return new instance of {@link UpdateOptions}.
|
||||
*/
|
||||
protected static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {
|
||||
|
||||
UpdateOptions options = new UpdateOptions();
|
||||
options.upsert(upsert);
|
||||
|
||||
if (update.hasArrayFilters()) {
|
||||
List<Document> list = new ArrayList<>(update.getArrayFilters().size());
|
||||
for (ArrayFilter arrayFilter : update.getArrayFilters()) {
|
||||
list.add(arrayFilter.asDocument());
|
||||
}
|
||||
options.arrayFilters(list);
|
||||
}
|
||||
|
||||
filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object chaining together an actual source with its {@link WriteModel} representation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
record SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {
|
||||
}
|
||||
}
|
||||
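For illustration, the helpers above translate Spring Data query and update types into driver options. A minimal sketch, assuming access from a (hypothetical) subclass since the methods are protected; the query, field names, and collation are made up:

```java
// Static imports of Criteria.where and Query.query assumed; inside a subclass of BulkOperationsSupport.
Query filter = query(where("grades.score").gte(80)).collation(Collation.of("en"));
Update update = new Update().set("grades.$[element].passed", true)
		.filterArray(where("element.score").gte(80));

// upsert=false, one array filter document, and the "en" collation end up on the driver options.
UpdateOptions updateOptions = computeUpdateOptions(filter, update, false);

// ORDERED/UNORDERED maps directly onto BulkWriteOptions#ordered.
BulkWriteOptions writeOptions = getBulkWriteOptions(BulkMode.UNORDERED);
```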
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2020 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -36,21 +36,29 @@ import com.mongodb.client.model.changestream.OperationType;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Myroslav Kosinskyi
|
||||
* @since 2.1
|
||||
*/
|
||||
public class ChangeStreamEvent<T> {
|
||||
|
||||
@SuppressWarnings("rawtypes") //
|
||||
private static final AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> CONVERTED_UPDATER = AtomicReferenceFieldUpdater
|
||||
.newUpdater(ChangeStreamEvent.class, Object.class, "converted");
|
||||
private static final AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> CONVERTED_FULL_DOCUMENT_UPDATER = AtomicReferenceFieldUpdater
|
||||
.newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocument");
|
||||
|
||||
@SuppressWarnings("rawtypes") //
|
||||
private static final AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER = AtomicReferenceFieldUpdater
|
||||
.newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocumentBeforeChange");
|
||||
|
||||
private final @Nullable ChangeStreamDocument<Document> raw;
|
||||
|
||||
private final Class<T> targetType;
|
||||
private final MongoConverter converter;
|
||||
|
||||
// accessed through CONVERTED_UPDATER.
|
||||
private volatile @Nullable T converted;
|
||||
// accessed through CONVERTED_FULL_DOCUMENT_UPDATER.
|
||||
private volatile @Nullable T convertedFullDocument;
|
||||
|
||||
// accessed through CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER.
|
||||
private volatile @Nullable T convertedFullDocumentBeforeChange;
|
||||
|
||||
/**
|
||||
* @param raw can be {@literal null}.
|
||||
@@ -147,27 +155,43 @@ public class ChangeStreamEvent<T> {
|
||||
@Nullable
|
||||
public T getBody() {
|
||||
|
||||
if (raw == null) {
|
||||
if (raw == null || raw.getFullDocument() == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Document fullDocument = raw.getFullDocument();
|
||||
return getConvertedFullDocument(raw.getFullDocument());
|
||||
}
|
||||
|
||||
if (fullDocument == null) {
|
||||
return targetType.cast(fullDocument);
|
||||
/**
|
||||
* Get the potentially converted {@link ChangeStreamDocument#getFullDocumentBeforeChange() document} before being changed.
|
||||
*
|
||||
* @return {@literal null} when {@link #getRaw()} or {@link ChangeStreamDocument#getFullDocumentBeforeChange()} is
|
||||
* {@literal null}.
|
||||
* @since 4.0
|
||||
*/
|
||||
@Nullable
|
||||
public T getBodyBeforeChange() {
|
||||
|
||||
if (raw == null || raw.getFullDocumentBeforeChange() == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return getConverted(fullDocument);
|
||||
return getConvertedFullDocumentBeforeChange(raw.getFullDocumentBeforeChange());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private T getConverted(Document fullDocument) {
|
||||
return (T) doGetConverted(fullDocument);
|
||||
private T getConvertedFullDocumentBeforeChange(Document fullDocument) {
|
||||
return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER);
|
||||
}
|
||||
|
||||
private Object doGetConverted(Document fullDocument) {
|
||||
@SuppressWarnings("unchecked")
|
||||
private T getConvertedFullDocument(Document fullDocument) {
|
||||
return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_UPDATER);
|
||||
}
|
||||
|
||||
Object result = CONVERTED_UPDATER.get(this);
|
||||
private Object doGetConverted(Document fullDocument, AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> updater) {
|
||||
|
||||
Object result = updater.get(this);
|
||||
|
||||
if (result != null) {
|
||||
return result;
|
||||
@@ -176,30 +200,26 @@ public class ChangeStreamEvent<T> {
|
||||
if (ClassUtils.isAssignable(Document.class, fullDocument.getClass())) {
|
||||
|
||||
result = converter.read(targetType, fullDocument);
|
||||
return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this);
|
||||
return updater.compareAndSet(this, null, result) ? result : updater.get(this);
|
||||
}
|
||||
|
||||
if (converter.getConversionService().canConvert(fullDocument.getClass(), targetType)) {
|
||||
|
||||
result = converter.getConversionService().convert(fullDocument, targetType);
|
||||
return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this);
|
||||
return updater.compareAndSet(this, null, result) ? result : updater.get(this);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
String.format("No converter found capable of converting %s to %s", fullDocument.getClass(), targetType));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ChangeStreamEvent {" + "raw=" + raw + ", targetType=" + targetType + '}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
public boolean equals(@Nullable Object o) {
|
||||
|
||||
if (this == o)
|
||||
return true;
|
||||
|
||||
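With pre- and post-images enabled on the collection, both document states of an update are reachable from the event. A minimal sketch; the {@code Person} target type and the {@code event} variable are illustrative:

```java
// event: a ChangeStreamEvent<Person> delivered by the change stream support.
Person after = event.getBody();               // converted getFullDocument(), may be null
Person before = event.getBodyBeforeChange();  // converted getFullDocumentBeforeChange(), new in 4.0, may be null
```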
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2020 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -32,6 +32,7 @@ import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.client.model.changestream.ChangeStreamDocument;
|
||||
import com.mongodb.client.model.changestream.FullDocument;
|
||||
import com.mongodb.client.model.changestream.FullDocumentBeforeChange;
|
||||
|
||||
/**
|
||||
* Options applicable to MongoDB <a href="https://docs.mongodb.com/manual/changeStreams/">Change Streams</a>. Intended
|
||||
@@ -40,6 +41,7 @@ import com.mongodb.client.model.changestream.FullDocument;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Myroslav Kosinskyi
|
||||
* @since 2.1
|
||||
*/
|
||||
public class ChangeStreamOptions {
|
||||
@@ -47,6 +49,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable Object filter;
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
@@ -74,6 +77,14 @@ public class ChangeStreamOptions {
|
||||
return Optional.ofNullable(fullDocumentLookup);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @since 4.0
|
||||
*/
|
||||
public Optional<FullDocumentBeforeChange> getFullDocumentBeforeChangeLookup() {
|
||||
return Optional.ofNullable(fullDocumentBeforeChangeLookup);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
@@ -139,21 +150,21 @@ public class ChangeStreamOptions {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
if (timestamp instanceof Instant) {
|
||||
return new BsonTimestamp((int) ((Instant) timestamp).getEpochSecond(), 0);
|
||||
if (timestamp instanceof Instant instant) {
|
||||
return new BsonTimestamp((int) instant.getEpochSecond(), 0);
|
||||
}
|
||||
|
||||
if (timestamp instanceof BsonTimestamp) {
|
||||
return Instant.ofEpochSecond(((BsonTimestamp) timestamp).getTime());
|
||||
if (timestamp instanceof BsonTimestamp bsonTimestamp) {
|
||||
return Instant.ofEpochSecond(bsonTimestamp.getTime());
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
"o_O that should actually not happen. The timestamp should be an Instant or a BsonTimestamp but was "
|
||||
"o_O that should actually not happen; The timestamp should be an Instant or a BsonTimestamp but was "
|
||||
+ ObjectUtils.nullSafeClassName(timestamp));
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
public boolean equals(@Nullable Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
@@ -170,6 +181,9 @@ public class ChangeStreamOptions {
|
||||
if (!ObjectUtils.nullSafeEquals(this.fullDocumentLookup, that.fullDocumentLookup)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.fullDocumentBeforeChangeLookup, that.fullDocumentBeforeChangeLookup)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.collation, that.collation)) {
|
||||
return false;
|
||||
}
|
||||
@@ -184,6 +198,7 @@ public class ChangeStreamOptions {
|
||||
int result = ObjectUtils.nullSafeHashCode(filter);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resumeToken);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentLookup);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentBeforeChangeLookup);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(collation);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resumeTimestamp);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resume);
|
||||
@@ -220,6 +235,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable Object filter;
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
@@ -234,7 +250,7 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder collation(Collation collation) {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null nor empty!");
|
||||
Assert.notNull(collation, "Collation must not be null nor empty");
|
||||
|
||||
this.collation = collation;
|
||||
return this;
|
||||
@@ -242,13 +258,13 @@ public class ChangeStreamOptions {
|
||||
|
||||
/**
|
||||
* Set the filter to apply.
|
||||
* <p/>
|
||||
* <br />
|
||||
* Fields on aggregation expression root level are prefixed to map to fields contained in
|
||||
* {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns},
|
||||
* {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken
|
||||
* as given, during the mapping procedure. You may want to have a look at the
|
||||
* <a href="https://docs.mongodb.com/manual/reference/change-events/">structure of Change Events</a>.
|
||||
* <p/>
|
||||
* <br />
|
||||
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are
|
||||
* mapped to domain type fields.
|
||||
*
|
||||
@@ -258,7 +274,7 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder filter(Aggregation filter) {
|
||||
|
||||
Assert.notNull(filter, "Filter must not be null!");
|
||||
Assert.notNull(filter, "Filter must not be null");
|
||||
|
||||
this.filter = filter;
|
||||
return this;
|
||||
@@ -287,7 +303,7 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeToken(BsonValue resumeToken) {
|
||||
|
||||
Assert.notNull(resumeToken, "ResumeToken must not be null!");
|
||||
Assert.notNull(resumeToken, "ResumeToken must not be null");
|
||||
|
||||
this.resumeToken = resumeToken;
|
||||
|
||||
@@ -316,12 +332,38 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder fullDocumentLookup(FullDocument lookup) {
|
||||
|
||||
Assert.notNull(lookup, "Lookup must not be null!");
|
||||
Assert.notNull(lookup, "Lookup must not be null");
|
||||
|
||||
this.fullDocumentLookup = lookup;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link FullDocumentBeforeChange} lookup to use.
|
||||
*
|
||||
* @param lookup must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 4.0
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder fullDocumentBeforeChangeLookup(FullDocumentBeforeChange lookup) {
|
||||
|
||||
Assert.notNull(lookup, "Lookup must not be null");
|
||||
|
||||
this.fullDocumentBeforeChangeLookup = lookup;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the full document before being changed if it is available.
|
||||
*
|
||||
* @return this.
|
||||
* @since 4.0
|
||||
* @see #fullDocumentBeforeChangeLookup(FullDocumentBeforeChange)
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder returnFullDocumentBeforeChange() {
|
||||
return fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the cluster time to resume from.
|
||||
*
|
||||
@@ -330,7 +372,7 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAt(Instant resumeTimestamp) {
|
||||
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null");
|
||||
|
||||
this.resumeTimestamp = resumeTimestamp;
|
||||
return this;
|
||||
@@ -345,7 +387,7 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) {
|
||||
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null");
|
||||
|
||||
this.resumeTimestamp = resumeTimestamp;
|
||||
return this;
|
||||
@@ -391,6 +433,7 @@ public class ChangeStreamOptions {
|
||||
options.filter = this.filter;
|
||||
options.resumeToken = this.resumeToken;
|
||||
options.fullDocumentLookup = this.fullDocumentLookup;
|
||||
options.fullDocumentBeforeChangeLookup = this.fullDocumentBeforeChangeLookup;
|
||||
options.collation = this.collation;
|
||||
options.resumeTimestamp = this.resumeTimestamp;
|
||||
options.resume = this.resume;
|
||||
|
||||
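Putting the new builder method next to the existing ones, an options instance that asks for pre-images could look like the following sketch; the resume instant is illustrative:

```java
import java.time.Instant;
import com.mongodb.client.model.changestream.FullDocument;

ChangeStreamOptions options = ChangeStreamOptions.builder()
		.fullDocumentLookup(FullDocument.UPDATE_LOOKUP)
		.returnFullDocumentBeforeChange()   // shortcut for fullDocumentBeforeChangeLookup(WHEN_AVAILABLE)
		.resumeAt(Instant.now())
		.build();
```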
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2020 the original author or authors.
|
||||
* Copyright 2010-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2020 the original author or authors.
|
||||
* Copyright 2010-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,12 +17,16 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.timeseries.GranularityDefinition;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.client.model.ValidationAction;
|
||||
import com.mongodb.client.model.ValidationLevel;
|
||||
@@ -42,29 +46,20 @@ public class CollectionOptions {
|
||||
private @Nullable Boolean capped;
|
||||
private @Nullable Collation collation;
|
||||
private ValidationOptions validationOptions;
|
||||
|
||||
/**
|
||||
* Constructs a new <code>CollectionOptions</code> instance.
|
||||
*
|
||||
* @param size the collection size in bytes, this data space is preallocated. Can be {@literal null}.
|
||||
* @param maxDocuments the maximum number of documents in the collection. Can be {@literal null}.
|
||||
* @param capped true to create a "capped" collection (fixed size with auto-FIFO behavior based on insertion order),
|
||||
* false otherwise. Can be {@literal null}.
|
||||
* @deprecated since 2.0 please use {@link CollectionOptions#empty()} as entry point.
|
||||
*/
|
||||
@Deprecated
|
||||
public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) {
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none());
|
||||
}
|
||||
private @Nullable TimeSeriesOptions timeSeriesOptions;
|
||||
private @Nullable CollectionChangeStreamOptions changeStreamOptions;
|
||||
|
||||
private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped,
|
||||
@Nullable Collation collation, ValidationOptions validationOptions) {
|
||||
@Nullable Collation collation, ValidationOptions validationOptions, @Nullable TimeSeriesOptions timeSeriesOptions,
|
||||
@Nullable CollectionChangeStreamOptions changeStreamOptions) {
|
||||
|
||||
this.maxDocuments = maxDocuments;
|
||||
this.size = size;
|
||||
this.capped = capped;
|
||||
this.collation = collation;
|
||||
this.validationOptions = validationOptions;
|
||||
this.timeSeriesOptions = timeSeriesOptions;
|
||||
this.changeStreamOptions = changeStreamOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -76,9 +71,9 @@ public class CollectionOptions {
|
||||
*/
|
||||
public static CollectionOptions just(Collation collation) {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null!");
|
||||
Assert.notNull(collation, "Collation must not be null");
|
||||
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none());
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -88,18 +83,45 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public static CollectionOptions empty() {
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none());
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use
|
||||
* {@link #timeSeries(TimeSeriesOptions)}.
|
||||
*
|
||||
* @param timeField The name of the property which contains the date in each time series document. Must not be
|
||||
* {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @see #timeSeries(TimeSeriesOptions)
|
||||
* @since 3.3
|
||||
*/
|
||||
public static CollectionOptions timeSeries(String timeField) {
|
||||
return empty().timeSeries(TimeSeriesOptions.timeSeries(timeField));
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick way to set up {@link CollectionOptions} for emitting (pre & post) change events.
|
||||
*
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @see #changeStream(CollectionChangeStreamOptions)
|
||||
* @see CollectionChangeStreamOptions#preAndPostImages(boolean)
|
||||
* @since 4.0
|
||||
*/
|
||||
public static CollectionOptions emitChangedRevisions() {
|
||||
return empty().changeStream(CollectionChangeStreamOptions.preAndPostImages(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and capped set to {@literal true}. <br />
|
||||
* <strong>NOTE</strong> Using capped collections requires defining {@link #size(long)}.
|
||||
* <strong>NOTE:</strong> Using capped collections requires defining {@link #size(long)}.
|
||||
*
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions capped() {
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -110,7 +132,8 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions maxDocuments(long maxDocuments) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -121,7 +144,8 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions size(long size) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -132,7 +156,8 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions collation(@Nullable Collation collation) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -224,7 +249,7 @@ public class CollectionOptions {
|
||||
*/
|
||||
public CollectionOptions schemaValidationLevel(ValidationLevel validationLevel) {
|
||||
|
||||
Assert.notNull(validationLevel, "ValidationLevel must not be null!");
|
||||
Assert.notNull(validationLevel, "ValidationLevel must not be null");
|
||||
return validation(validationOptions.validationLevel(validationLevel));
|
||||
}
|
||||
|
||||
@@ -238,7 +263,7 @@ public class CollectionOptions {
|
||||
*/
|
||||
public CollectionOptions schemaValidationAction(ValidationAction validationAction) {
|
||||
|
||||
Assert.notNull(validationAction, "ValidationAction must not be null!");
|
||||
Assert.notNull(validationAction, "ValidationAction must not be null");
|
||||
return validation(validationOptions.validationAction(validationAction));
|
||||
}
|
||||
|
||||
@@ -251,8 +276,37 @@ public class CollectionOptions {
|
||||
*/
|
||||
public CollectionOptions validation(ValidationOptions validationOptions) {
|
||||
|
||||
Assert.notNull(validationOptions, "ValidationOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
Assert.notNull(validationOptions, "ValidationOptions must not be null");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}.
|
||||
*
|
||||
* @param timeSeriesOptions must not be {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) {
|
||||
|
||||
Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}.
|
||||
*
|
||||
* @param changeStreamOptions must not be {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @since 4.0
|
||||
*/
|
||||
public CollectionOptions changeStream(CollectionChangeStreamOptions changeStreamOptions) {
|
||||
|
||||
Assert.notNull(changeStreamOptions, "ChangeStreamOptions must not be null");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -303,6 +357,80 @@ public class CollectionOptions {
|
||||
return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link TimeSeriesOptions} if available.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not specified.
|
||||
* @since 3.3
|
||||
*/
|
||||
public Optional<TimeSeriesOptions> getTimeSeriesOptions() {
|
||||
return Optional.ofNullable(timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CollectionChangeStreamOptions} if available.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not specified.
|
||||
* @since 4.0
|
||||
*/
|
||||
public Optional<CollectionChangeStreamOptions> getChangeStreamOptions() {
|
||||
return Optional.ofNullable(changeStreamOptions);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "CollectionOptions{" + "maxDocuments=" + maxDocuments + ", size=" + size + ", capped=" + capped
|
||||
+ ", collation=" + collation + ", validationOptions=" + validationOptions + ", timeSeriesOptions="
|
||||
+ timeSeriesOptions + ", changeStreamOptions=" + changeStreamOptions + ", disableValidation="
|
||||
+ disableValidation() + ", strictValidation=" + strictValidation() + ", moderateValidation="
|
||||
+ moderateValidation() + ", warnOnValidationError=" + warnOnValidationError() + ", failOnValidationError="
|
||||
+ failOnValidationError() + '}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(@Nullable Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
CollectionOptions that = (CollectionOptions) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(maxDocuments, that.maxDocuments)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(size, that.size)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(capped, that.capped)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(collation, that.collation)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(validationOptions, that.validationOptions)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(timeSeriesOptions, that.timeSeriesOptions)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(changeStreamOptions, that.changeStreamOptions);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(maxDocuments);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(size);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(capped);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(collation);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(validationOptions);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(timeSeriesOptions);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(changeStreamOptions);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encapsulation of validation options.
|
||||
*
|
||||
@@ -385,7 +513,7 @@ public class CollectionOptions {
|
||||
/**
|
||||
* Get the {@code validationAction} to perform.
|
||||
*
|
||||
* @return @return {@link Optional#empty()} if not set.
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<ValidationAction> getValidationAction() {
|
||||
return Optional.ofNullable(validationAction);
|
||||
@@ -397,5 +525,211 @@ public class CollectionOptions {
|
||||
boolean isEmpty() {
|
||||
return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
return "ValidationOptions{" + "validator=" + validator + ", validationLevel=" + validationLevel
|
||||
+ ", validationAction=" + validationAction + '}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(@Nullable Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ValidationOptions that = (ValidationOptions) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(validator, that.validator)) {
|
||||
return false;
|
||||
}
|
||||
if (validationLevel != that.validationLevel)
|
||||
return false;
|
||||
return validationAction == that.validationAction;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(validator);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(validationLevel);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(validationAction);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Encapsulation of options applied to define a collection's change stream behaviour.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 4.0
|
||||
*/
|
||||
public static class CollectionChangeStreamOptions {
|
||||
|
||||
private final boolean preAndPostImages;
|
||||
|
||||
private CollectionChangeStreamOptions(boolean emitChangedRevisions) {
|
||||
this.preAndPostImages = emitChangedRevisions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Output the version of a document before and after changes (the document pre- and post-images).
|
||||
*
|
||||
* @param emitChangedRevisions whether to emit the document pre- and post-images.
* @return new instance of {@link CollectionChangeStreamOptions}.
|
||||
*/
|
||||
public static CollectionChangeStreamOptions preAndPostImages(boolean emitChangedRevisions) {
|
||||
return new CollectionChangeStreamOptions(emitChangedRevisions);
|
||||
}
|
||||
|
||||
public boolean getPreAndPostImages() {
|
||||
return preAndPostImages;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "CollectionChangeStreamOptions{" + "preAndPostImages=" + preAndPostImages + '}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(@Nullable Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
CollectionChangeStreamOptions that = (CollectionChangeStreamOptions) o;
|
||||
|
||||
return preAndPostImages == that.preAndPostImages;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return (preAndPostImages ? 1 : 0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Options applicable to Time Series collections.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/core/timeseries-collections">https://docs.mongodb.com/manual/core/timeseries-collections</a>
|
||||
*/
|
||||
public static class TimeSeriesOptions {
|
||||
|
||||
private final String timeField;
|
||||
|
||||
private @Nullable final String metaField;
|
||||
|
||||
private final GranularityDefinition granularity;
|
||||
|
||||
private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity) {
|
||||
|
||||
Assert.hasText(timeField, "Time field must not be empty or null");
|
||||
|
||||
this.timeField = timeField;
|
||||
this.metaField = metaField;
|
||||
this.granularity = granularity;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}, the one
|
||||
* that contains the date in each time series document. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param timeField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public static TimeSeriesOptions timeSeries(String timeField) {
|
||||
return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the name of the field which contains metadata in each time series document. Should not be the {@literal id}
|
||||
* nor the {@link TimeSeriesOptions#timeSeries(String) timeField}, nor point to an {@literal array} or
|
||||
* {@link java.util.Collection}. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param metaField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public TimeSeriesOptions metaField(String metaField) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized.
|
||||
* Select one that is closest to the time span between incoming measurements.
|
||||
*
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
* @see Granularity
|
||||
*/
|
||||
public TimeSeriesOptions granularity(GranularityDefinition granularity) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public String getTimeField() {
|
||||
return timeField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via
|
||||
* {@link org.springframework.util.StringUtils#hasText(String)}.
|
||||
*/
|
||||
@Nullable
|
||||
public String getMetaField() {
|
||||
return metaField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public GranularityDefinition getGranularity() {
|
||||
return granularity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
return "TimeSeriesOptions{" + "timeField='" + timeField + '\'' + ", metaField='" + metaField + '\''
|
||||
+ ", granularity=" + granularity + '}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(@Nullable Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
TimeSeriesOptions that = (TimeSeriesOptions) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(timeField, that.timeField)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(metaField, that.metaField)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(granularity, that.granularity);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(timeField);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(metaField);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(granularity);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
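As a composition sketch, the time series and change stream options above chain off the existing fluent API; the field names and granularity are illustrative:

```java
import org.springframework.data.mongodb.core.CollectionOptions;
import org.springframework.data.mongodb.core.CollectionOptions.CollectionChangeStreamOptions;
import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions;
import org.springframework.data.mongodb.core.timeseries.Granularity;

CollectionOptions options = CollectionOptions.empty()
		.timeSeries(TimeSeriesOptions.timeSeries("timestamp")
				.metaField("sensorId")
				.granularity(Granularity.MINUTES))
		.changeStream(CollectionChangeStreamOptions.preAndPostImages(true));

// Shortcuts for the common cases shown above:
CollectionOptions timeSeries = CollectionOptions.timeSeries("timestamp");
CollectionOptions withRevisions = CollectionOptions.emitChangedRevisions();
```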
@@ -0,0 +1,61 @@
|
||||
/*
|
||||
* Copyright 2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* Interface for functional preparation of a {@link MongoCollection}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 4.1
|
||||
*/
|
||||
public interface CollectionPreparer<T> {
|
||||
|
||||
/**
|
||||
* Returns a preparer that always returns its input collection.
|
||||
*
|
||||
* @return a preparer that always returns its input collection.
|
||||
*/
|
||||
static <T> CollectionPreparer<T> identity() {
|
||||
return it -> it;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepare the {@code collection}.
|
||||
*
|
||||
* @param collection the collection to prepare.
|
||||
* @return the prepared collection.
|
||||
*/
|
||||
T prepare(T collection);
|
||||
|
||||
/**
|
||||
* Returns a composed {@code CollectionPreparer} that first applies this preparer to the collection, and then applies
|
||||
* the {@code after} preparer to the result. If evaluation of either function throws an exception, it is relayed to
|
||||
* the caller of the composed function.
|
||||
*
|
||||
* @param after the collection preparer to apply after this function is applied.
|
||||
* @return a composed {@code CollectionPreparer} that first applies this preparer and then applies the {@code after}
|
||||
* preparer.
|
||||
*/
|
||||
default CollectionPreparer<T> andThen(CollectionPreparer<T> after) {
|
||||
Assert.notNull(after, "After CollectionPreparer must not be null");
|
||||
return c -> after.prepare(prepare(c));
|
||||
}
|
||||
|
||||
}
|
||||
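A minimal sketch of composing two preparers with {@code andThen}; the read concern and read preference values, and the {@code collection} being prepared, are illustrative:

```java
import org.bson.Document;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
import com.mongodb.client.MongoCollection;

CollectionPreparer<MongoCollection<Document>> majority =
		collection -> collection.withReadConcern(ReadConcern.MAJORITY);
CollectionPreparer<MongoCollection<Document>> secondaries =
		collection -> collection.withReadPreference(ReadPreference.secondaryPreferred());

// Applies the read concern first, then the read preference, to the collection passed in.
MongoCollection<Document> prepared = majority.andThen(secondaries).prepare(someCollection);
```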
@@ -0,0 +1,182 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;

import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;

import org.bson.Document;

import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
import com.mongodb.client.MongoCollection;

/**
* Support class for delegate implementations to apply {@link ReadConcern} and {@link ReadPreference} settings upon
* {@link CollectionPreparer preparing a collection}.
*
* @author Mark Paluch
* @since 4.1
*/
class CollectionPreparerSupport implements ReadConcernAware, ReadPreferenceAware {

private final List<Object> sources;

private CollectionPreparerSupport(List<Object> sources) {
this.sources = sources;
}

<T> T doPrepare(T collection, Function<T, ReadConcern> concernAccessor, BiFunction<T, ReadConcern, T> concernFunction,
Function<T, ReadPreference> preferenceAccessor, BiFunction<T, ReadPreference, T> preferenceFunction) {

T collectionToUse = collection;

for (Object source : sources) {
if (source instanceof ReadConcernAware rca && rca.hasReadConcern()) {

ReadConcern concern = rca.getReadConcern();
if (concernAccessor.apply(collectionToUse) != concern) {
collectionToUse = concernFunction.apply(collectionToUse, concern);
}
break;
}
}

for (Object source : sources) {
if (source instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {

ReadPreference preference = rpa.getReadPreference();
if (preferenceAccessor.apply(collectionToUse) != preference) {
collectionToUse = preferenceFunction.apply(collectionToUse, preference);
}
break;
}
}

return collectionToUse;
}

@Override
public boolean hasReadConcern() {

for (Object aware : sources) {
if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) {
return true;
}
}

return false;
}

@Override
public ReadConcern getReadConcern() {

for (Object aware : sources) {
if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) {
return rca.getReadConcern();
}
}

return null;
}

@Override
public boolean hasReadPreference() {

for (Object aware : sources) {
if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
return true;
}
}

return false;
}

@Override
public ReadPreference getReadPreference() {

for (Object aware : sources) {
if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
return rpa.getReadPreference();
}
}

return null;
}

static class CollectionPreparerDelegate extends CollectionPreparerSupport
implements CollectionPreparer<MongoCollection<Document>> {

private CollectionPreparerDelegate(List<Object> sources) {
super(sources);
}

public static CollectionPreparerDelegate of(ReadPreferenceAware... awares) {
return of((Object[]) awares);
}

public static CollectionPreparerDelegate of(Object... mixedAwares) {

if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) {
return (CollectionPreparerDelegate) mixedAwares[0];
}

return new CollectionPreparerDelegate(Arrays.asList(mixedAwares));
}

@Override
public MongoCollection<Document> prepare(MongoCollection<Document> collection) {
return doPrepare(collection, MongoCollection::getReadConcern, MongoCollection::withReadConcern,
MongoCollection::getReadPreference, MongoCollection::withReadPreference);
}

}

static class ReactiveCollectionPreparerDelegate extends CollectionPreparerSupport
implements CollectionPreparer<com.mongodb.reactivestreams.client.MongoCollection<Document>> {

private ReactiveCollectionPreparerDelegate(List<Object> sources) {
super(sources);
}

public static ReactiveCollectionPreparerDelegate of(ReadPreferenceAware... awares) {
return of((Object[]) awares);
}

public static ReactiveCollectionPreparerDelegate of(Object... mixedAwares) {

if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) {
return (ReactiveCollectionPreparerDelegate) mixedAwares[0];
}

return new ReactiveCollectionPreparerDelegate(Arrays.asList(mixedAwares));
}

@Override
public com.mongodb.reactivestreams.client.MongoCollection<Document> prepare(
com.mongodb.reactivestreams.client.MongoCollection<Document> collection) {
return doPrepare(collection, //
com.mongodb.reactivestreams.client.MongoCollection::getReadConcern,
com.mongodb.reactivestreams.client.MongoCollection::withReadConcern,
com.mongodb.reactivestreams.client.MongoCollection::getReadPreference,
com.mongodb.reactivestreams.client.MongoCollection::withReadPreference);
}

}

}
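
Note (illustrative, not part of the diff): the delegates above are used internally by the template implementations. Conceptually, a delegate built from one or more ReadConcernAware / ReadPreferenceAware sources reconfigures the collection only when a source actually carries a setting that differs from the collection's current one, roughly (readPreferenceAwareSource is a hypothetical variable):

    CollectionPreparerDelegate preparer = CollectionPreparerDelegate.of(readPreferenceAwareSource);
    MongoCollection<Document> collectionToUse = preparer.prepare(collection);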
@@ -1,5 +1,5 @@
/*
* Copyright 2019-2020 the original author or authors.
* Copyright 2019-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -23,8 +23,8 @@ import java.util.List;
import java.util.Map;

import org.bson.Document;

import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.query.MetricConversion;
import org.springframework.lang.Nullable;
import org.springframework.util.ObjectUtils;

@@ -38,7 +38,7 @@ import org.springframework.util.ObjectUtils;
*/
class CountQuery {

private Document source;
private final Document source;

private CountQuery(Document source) {
this.source = source;
@@ -64,18 +64,15 @@ class CountQuery {

for (Map.Entry<String, Object> entry : source.entrySet()) {

if (entry.getValue() instanceof Document && requiresRewrite(entry.getValue())) {
if (entry.getValue() instanceof Document document && requiresRewrite(entry.getValue())) {

Document theValue = (Document) entry.getValue();
target.putAll(createGeoWithin(entry.getKey(), theValue, source.get("$and")));
target.putAll(createGeoWithin(entry.getKey(), document, source.get("$and")));
continue;
}

if (entry.getValue() instanceof Collection && requiresRewrite(entry.getValue())) {
if (entry.getValue() instanceof Collection<?> collection && requiresRewrite(entry.getValue())) {

Collection<?> source = (Collection<?>) entry.getValue();

target.put(entry.getKey(), rewriteCollection(source));
target.put(entry.getKey(), rewriteCollection(collection));
continue;
}

@@ -96,12 +93,12 @@ class CountQuery {
*/
private boolean requiresRewrite(Object valueToInspect) {

if (valueToInspect instanceof Document) {
return requiresRewrite((Document) valueToInspect);
if (valueToInspect instanceof Document document) {
return requiresRewrite(document);
}

if (valueToInspect instanceof Collection) {
return requiresRewrite((Collection) valueToInspect);
if (valueToInspect instanceof Collection<?> collection) {
return requiresRewrite(collection);
}

return false;
@@ -110,7 +107,7 @@ class CountQuery {
private boolean requiresRewrite(Collection<?> collection) {

for (Object o : collection) {
if (o instanceof Document && requiresRewrite((Document) o)) {
if (o instanceof Document document && requiresRewrite(document)) {
return true;
}
}
@@ -139,8 +136,8 @@ class CountQuery {
Collection<Object> rewrittenCollection = new ArrayList<>(source.size());

for (Object item : source) {
if (item instanceof Document && requiresRewrite(item)) {
rewrittenCollection.add(CountQuery.of((Document) item).toQueryDocument());
if (item instanceof Document document && requiresRewrite(item)) {
rewrittenCollection.add(CountQuery.of(document).toQueryDocument());
} else {
rewrittenCollection.add(item);
}
@@ -157,12 +154,14 @@ class CountQuery {
* @param $and potentially existing {@code $and} condition.
* @return the rewritten query {@link Document}.
*/
@SuppressWarnings("unchecked")
private static Document createGeoWithin(String key, Document source, @Nullable Object $and) {

boolean spheric = source.containsKey("$nearSphere");
Object $near = spheric ? source.get("$nearSphere") : source.get("$near");

Number maxDistance = source.containsKey("$maxDistance") ? (Number) source.get("$maxDistance") : Double.MAX_VALUE;
Number maxDistance = getMaxDistance(source, $near, spheric);

List<Object> $centerMax = Arrays.asList(toCenterCoordinates($near), maxDistance);
Document $geoWithinMax = new Document("$geoWithin",
new Document(spheric ? "$centerSphere" : "$center", $centerMax));
@@ -176,23 +175,51 @@ class CountQuery {
Document $geoWithinMin = new Document("$geoWithin",
new Document(spheric ? "$centerSphere" : "$center", $centerMin));

List<Document> criteria = new ArrayList<>();
List<Document> criteria;

if ($and != null) {
if ($and instanceof Collection) {
criteria.addAll((Collection) $and);
Collection<Document> andElements = (Collection<Document>) $and;
criteria = new ArrayList<>(andElements.size() + 2);
criteria.addAll(andElements);
} else {
throw new IllegalArgumentException(
"Cannot rewrite query as it contains an '$and' element that is not a Collection!: Offending element: "
"Cannot rewrite query as it contains an '$and' element that is not a Collection: Offending element: "
+ $and);
}
} else {
criteria = new ArrayList<>(2);
}

criteria.add(new Document("$nor", Collections.singletonList(new Document(key, $geoWithinMin))));
criteria.add(new Document(key, $geoWithinMax));

return new Document("$and", criteria);
}

private static Number getMaxDistance(Document source, Object $near, boolean spheric) {

Number maxDistance = Double.MAX_VALUE;

if (source.containsKey("$maxDistance")) { // legacy coordinate pair
return (Number) source.get("$maxDistance");
}

if ($near instanceof Document nearDoc) {

if (nearDoc.containsKey("$maxDistance")) {

maxDistance = (Number) nearDoc.get("$maxDistance");
// geojson is in Meters but we need radians x/(6378.1*1000)
if (spheric && nearDoc.containsKey("$geometry")) {
maxDistance = MetricConversion.metersToRadians(maxDistance.doubleValue());
}
}
}

return maxDistance;
}

private static boolean containsNear(Document source) {
return source.containsKey("$near") || source.containsKey("$nearSphere");
}
@@ -212,14 +239,20 @@ class CountQuery {
return value;
}

if (value instanceof Point) {
return Arrays.asList(((Point) value).getX(), ((Point) value).getY());
if (value instanceof Point point) {
return Arrays.asList(point.getX(), point.getY());
}

if (value instanceof Document && ((Document) value).containsKey("x")) {
if (value instanceof Document document) {

Document point = (Document) value;
return Arrays.asList(point.get("x"), point.get("y"));
if (document.containsKey("x")) {
return Arrays.asList(document.get("x"), document.get("y"));
}

if (document.containsKey("$geometry")) {
Document geoJsonPoint = document.get("$geometry", Document.class);
return geoJsonPoint.get("coordinates");
}
}

return value;
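
Note (illustrative, not part of the diff): the getMaxDistance change above converts a GeoJSON $maxDistance from meters to radians by dividing by the Earth radius in meters, i.e. radians = meters / (6378.1 * 1000), as stated in the inline comment. A quick sanity check of that formula, assuming MetricConversion.metersToRadians applies exactly this division:

    double meters = 1000.0;                       // 1 km
    double radians = meters / (6378.1 * 1000.0);  // ~1.568e-4 rad
    // expected to match MetricConversion.metersToRadians(meters)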
@@ -1,5 +1,5 @@
/*
* Copyright 2002-2020 the original author or authors.
* Copyright 2002-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -61,8 +61,8 @@ public interface CursorPreparer extends ReadPreferenceAware {
default FindIterable<Document> initiateFind(MongoCollection<Document> collection,
Function<MongoCollection<Document>, FindIterable<Document>> find) {

Assert.notNull(collection, "Collection must not be null!");
Assert.notNull(find, "Find function must not be null!");
Assert.notNull(collection, "Collection must not be null");
Assert.notNull(find, "Find function must not be null");

if (hasReadPreference()) {
collection = collection.withReadPreference(getReadPreference());

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2020 the original author or authors.
* Copyright 2010-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
* Copyright 2015-2020 the original author or authors.
* Copyright 2015-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,42 +16,47 @@
package org.springframework.data.mongodb.core;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

import org.bson.Document;
import org.bson.conversions.Bson;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.data.mapping.callback.EntityCallback;
import org.springframework.data.mapping.callback.EntityCallbacks;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
import org.springframework.data.util.Pair;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;

import com.mongodb.MongoBulkWriteException;
import com.mongodb.WriteConcern;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.*;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.DeleteOptions;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.WriteModel;

/**
* Default implementation for {@link BulkOperations}.
@@ -67,7 +72,7 @@ import com.mongodb.client.model.*;
* @author Jacob Botuck
* @since 1.9
*/
class DefaultBulkOperations implements BulkOperations {
class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperations {

private final MongoOperations mongoOperations;
private final String collectionName;
@@ -75,7 +80,6 @@ class DefaultBulkOperations implements BulkOperations {
private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();

private @Nullable WriteConcern defaultWriteConcern;

private BulkWriteOptions bulkOptions;

/**
@@ -90,14 +94,15 @@ class DefaultBulkOperations implements BulkOperations {
DefaultBulkOperations(MongoOperations mongoOperations, String collectionName,
BulkOperationContext bulkOperationContext) {

Assert.notNull(mongoOperations, "MongoOperations must not be null!");
Assert.hasText(collectionName, "CollectionName must not be null nor empty!");
Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null!");
super(collectionName);
Assert.notNull(mongoOperations, "MongoOperations must not be null");
Assert.hasText(collectionName, "CollectionName must not be null nor empty");
Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null");

this.mongoOperations = mongoOperations;
this.collectionName = collectionName;
this.bulkOperationContext = bulkOperationContext;
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
}

/**
@@ -109,14 +114,10 @@ class DefaultBulkOperations implements BulkOperations {
this.defaultWriteConcern = defaultWriteConcern;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.lang.Object)
*/
@Override
public BulkOperations insert(Object document) {

Assert.notNull(document, "Document must not be null!");
Assert.notNull(document, "Document must not be null");

maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName));
Object source = maybeInvokeBeforeConvertCallback(document);
@@ -125,93 +126,65 @@ class DefaultBulkOperations implements BulkOperations {
return this;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.util.List)
*/
@Override
public BulkOperations insert(List<? extends Object> documents) {

Assert.notNull(documents, "Documents must not be null!");
Assert.notNull(documents, "Documents must not be null");

documents.forEach(this::insert);

return this;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
*/
@Override
@SuppressWarnings("unchecked")
public BulkOperations updateOne(Query query, Update update) {
public BulkOperations updateOne(Query query, UpdateDefinition update) {

Assert.notNull(query, "Query must not be null!");
Assert.notNull(update, "Update must not be null!");
Assert.notNull(query, "Query must not be null");
Assert.notNull(update, "Update must not be null");

return updateOne(Collections.singletonList(Pair.of(query, update)));
return update(query, update, false, false);
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(java.util.List)
*/
@Override
public BulkOperations updateOne(List<Pair<Query, Update>> updates) {
public BulkOperations updateOne(List<Pair<Query, UpdateDefinition>> updates) {

Assert.notNull(updates, "Updates must not be null!");
Assert.notNull(updates, "Updates must not be null");

for (Pair<Query, Update> update : updates) {
for (Pair<Query, UpdateDefinition> update : updates) {
update(update.getFirst(), update.getSecond(), false, false);
}

return this;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
*/
@Override
@SuppressWarnings("unchecked")
public BulkOperations updateMulti(Query query, Update update) {
public BulkOperations updateMulti(Query query, UpdateDefinition update) {

Assert.notNull(query, "Query must not be null!");
Assert.notNull(update, "Update must not be null!");
Assert.notNull(query, "Query must not be null");
Assert.notNull(update, "Update must not be null");

return updateMulti(Collections.singletonList(Pair.of(query, update)));
update(query, update, false, true);

return this;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(java.util.List)
*/
@Override
public BulkOperations updateMulti(List<Pair<Query, Update>> updates) {
public BulkOperations updateMulti(List<Pair<Query, UpdateDefinition>> updates) {

Assert.notNull(updates, "Updates must not be null!");
Assert.notNull(updates, "Updates must not be null");

for (Pair<Query, Update> update : updates) {
for (Pair<Query, UpdateDefinition> update : updates) {
update(update.getFirst(), update.getSecond(), false, true);
}

return this;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
*/
@Override
public BulkOperations upsert(Query query, Update update) {
public BulkOperations upsert(Query query, UpdateDefinition update) {
return update(query, update, true, true);
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(java.util.List)
*/
@Override
public BulkOperations upsert(List<Pair<Query, Update>> updates) {

@@ -222,14 +195,10 @@ class DefaultBulkOperations implements BulkOperations {
return this;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#remove(org.springframework.data.mongodb.core.query.Query)
*/
@Override
public BulkOperations remove(Query query) {

Assert.notNull(query, "Query must not be null!");
Assert.notNull(query, "Query must not be null");

DeleteOptions deleteOptions = new DeleteOptions();
query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);
@@ -239,14 +208,10 @@ class DefaultBulkOperations implements BulkOperations {
return this;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#remove(java.util.List)
*/
@Override
public BulkOperations remove(List<Query> removes) {

Assert.notNull(removes, "Removals must not be null!");
Assert.notNull(removes, "Removals must not be null");

for (Query query : removes) {
remove(query);
@@ -255,16 +220,12 @@ class DefaultBulkOperations implements BulkOperations {
return this;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#replaceOne(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions)
*/
@Override
public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {

Assert.notNull(query, "Query must not be null!");
Assert.notNull(replacement, "Replacement must not be null!");
Assert.notNull(options, "Options must not be null!");
Assert.notNull(query, "Query must not be null");
Assert.notNull(replacement, "Replacement must not be null");
Assert.notNull(options, "Options must not be null");

ReplaceOptions replaceOptions = new ReplaceOptions();
replaceOptions.upsert(options.isUpsert());
@@ -278,10 +239,6 @@ class DefaultBulkOperations implements BulkOperations {
return this;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
*/
@Override
public com.mongodb.bulk.BulkWriteResult execute() {

@@ -289,14 +246,14 @@ class DefaultBulkOperations implements BulkOperations {

com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo);

Assert.state(result != null, "Result must not be null.");
Assert.state(result != null, "Result must not be null");

models.forEach(this::maybeEmitAfterSaveEvent);
models.forEach(this::maybeInvokeAfterSaveCallback);

return result;
} finally {
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
}
}

@@ -315,9 +272,8 @@ class DefaultBulkOperations implements BulkOperations {
bulkOptions);
} catch (RuntimeException ex) {

if (ex instanceof MongoBulkWriteException) {
if (ex instanceof MongoBulkWriteException mongoBulkWriteException) {

MongoBulkWriteException mongoBulkWriteException = (MongoBulkWriteException) ex;
if (mongoBulkWriteException.getWriteConcernError() != null) {
throw new DataIntegrityViolationException(ex.getMessage(), ex);
}
@@ -332,17 +288,17 @@ class DefaultBulkOperations implements BulkOperations {

maybeEmitBeforeSaveEvent(it);

if (it.getModel() instanceof InsertOneModel) {
if (it.model() instanceof InsertOneModel<Document> model) {

Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
maybeInvokeBeforeSaveCallback(it.getSource(), target);
} else if (it.getModel() instanceof ReplaceOneModel) {
Document target = model.getDocument();
maybeInvokeBeforeSaveCallback(it.source(), target);
} else if (it.model() instanceof ReplaceOneModel<Document> model) {

Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
maybeInvokeBeforeSaveCallback(it.getSource(), target);
Document target = model.getReplacement();
maybeInvokeBeforeSaveCallback(it.source(), target);
}

return mapWriteModel(it.getModel());
return mapWriteModel(it.source(), it.model());
}

/**
@@ -354,10 +310,10 @@ class DefaultBulkOperations implements BulkOperations {
* @param multi whether to issue a multi-update.
* @return the {@link BulkOperations} with the update registered.
*/
private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) {
private BulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) {

Assert.notNull(query, "Query must not be null!");
Assert.notNull(update, "Update must not be null!");
Assert.notNull(query, "Query must not be null");
Assert.notNull(update, "Update must not be null");

UpdateOptions options = computeUpdateOptions(query, update, upsert);

@@ -370,53 +326,30 @@ class DefaultBulkOperations implements BulkOperations {
return this;
}

private WriteModel<Document> mapWriteModel(WriteModel<Document> writeModel) {

if (writeModel instanceof UpdateOneModel) {

UpdateOneModel<Document> model = (UpdateOneModel<Document>) writeModel;

return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
model.getOptions());
}

if (writeModel instanceof UpdateManyModel) {

UpdateManyModel<Document> model = (UpdateManyModel<Document>) writeModel;

return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
model.getOptions());
}

if (writeModel instanceof DeleteOneModel) {

DeleteOneModel<Document> model = (DeleteOneModel<Document>) writeModel;

return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions());
}

if (writeModel instanceof DeleteManyModel) {

DeleteManyModel<Document> model = (DeleteManyModel<Document>) writeModel;

return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions());
}

return writeModel;
@Override
protected void maybeEmitEvent(ApplicationEvent event) {
bulkOperationContext.publishEvent(event);
}

private Bson getMappedUpdate(Bson update) {
return bulkOperationContext.getUpdateMapper().getMappedObject(update, bulkOperationContext.getEntity());
@Override
protected UpdateMapper updateMapper() {
return bulkOperationContext.updateMapper();
}

private Bson getMappedQuery(Bson query) {
return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity());
@Override
protected QueryMapper queryMapper() {
return bulkOperationContext.queryMapper();
}

@Override
protected Optional<? extends MongoPersistentEntity<?>> entity() {
return bulkOperationContext.entity();
}

private Document getMappedObject(Object source) {

if (source instanceof Document) {
return (Document) source;
if (source instanceof Document document) {
return document;
}

Document sink = new Document();
@@ -429,268 +362,83 @@ class DefaultBulkOperations implements BulkOperations {
models.add(new SourceAwareWriteModelHolder(source, model));
}

private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {

if (holder.getModel() instanceof InsertOneModel) {

Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
} else if (holder.getModel() instanceof ReplaceOneModel) {

Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
}
}

private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {

if (holder.getModel() instanceof InsertOneModel) {

Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
} else if (holder.getModel() instanceof ReplaceOneModel) {

Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
}
}

private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {

if (holder.getModel() instanceof InsertOneModel) {
if (holder.model() instanceof InsertOneModel<Document> model) {

Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
maybeInvokeAfterSaveCallback(holder.getSource(), target);
} else if (holder.getModel() instanceof ReplaceOneModel) {
Document target = model.getDocument();
maybeInvokeAfterSaveCallback(holder.source(), target);
} else if (holder.model() instanceof ReplaceOneModel<Document> model) {

Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
maybeInvokeAfterSaveCallback(holder.getSource(), target);
Document target = model.getReplacement();
maybeInvokeAfterSaveCallback(holder.source(), target);
}
}

private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {

if (bulkOperationContext.getEventPublisher() == null) {
return event;
}

bulkOperationContext.getEventPublisher().publishEvent(event);
return event;
private void publishEvent(MongoMappingEvent<?> event) {
bulkOperationContext.publishEvent(event);
}

private Object maybeInvokeBeforeConvertCallback(Object value) {

if (bulkOperationContext.getEntityCallbacks() == null) {
return value;
}

return bulkOperationContext.getEntityCallbacks().callback(BeforeConvertCallback.class, value, collectionName);
return bulkOperationContext.callback(BeforeConvertCallback.class, value, collectionName);
}

private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {

if (bulkOperationContext.getEntityCallbacks() == null) {
return value;
}

return bulkOperationContext.getEntityCallbacks().callback(BeforeSaveCallback.class, value, mappedDocument,
collectionName);
return bulkOperationContext.callback(BeforeSaveCallback.class, value, mappedDocument, collectionName);
}

private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {

if (bulkOperationContext.getEntityCallbacks() == null) {
return value;
}

return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument,
collectionName);
}

private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {

BulkWriteOptions options = new BulkWriteOptions();

switch (bulkMode) {
case ORDERED:
return options.ordered(true);
case UNORDERED:
return options.ordered(false);
}

throw new IllegalStateException("BulkMode was null!");
return bulkOperationContext.callback(AfterSaveCallback.class, value, mappedDocument, collectionName);
}

/**
* @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
* @param update The {@link Update} to apply
* @param upsert flag to indicate if document should be upserted.
* @return new instance of {@link UpdateOptions}.
*/
private static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {

UpdateOptions options = new UpdateOptions();
options.upsert(upsert);

if (update.hasArrayFilters()) {
List<Document> list = new ArrayList<>(update.getArrayFilters().size());
for (ArrayFilter arrayFilter : update.getArrayFilters()) {
list.add(arrayFilter.asDocument());
}
options.arrayFilters(list);
}

filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
return options;
}

/**
* {@link BulkOperationContext} holds information about
* {@link org.springframework.data.mongodb.core.BulkOperations.BulkMode} the entity in use as well as references to
* {@link BulkOperationContext} holds information about {@link BulkMode} the entity in use as well as references to
* {@link QueryMapper} and {@link UpdateMapper}.
*
* @author Christoph Strobl
* @since 2.0
*/
static final class BulkOperationContext {
record BulkOperationContext(BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher,
@Nullable EntityCallbacks entityCallbacks) {

private final BulkMode bulkMode;
private final Optional<? extends MongoPersistentEntity<?>> entity;
private final QueryMapper queryMapper;
private final UpdateMapper updateMapper;
private final ApplicationEventPublisher eventPublisher;
private final EntityCallbacks entityCallbacks;

BulkOperationContext(BulkOperations.BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
QueryMapper queryMapper, UpdateMapper updateMapper, ApplicationEventPublisher eventPublisher,
EntityCallbacks entityCallbacks) {

this.bulkMode = bulkMode;
this.entity = entity;
this.queryMapper = queryMapper;
this.updateMapper = updateMapper;
this.eventPublisher = eventPublisher;
this.entityCallbacks = entityCallbacks;
public boolean skipEntityCallbacks() {
return entityCallbacks == null;
}

public BulkMode getBulkMode() {
return this.bulkMode;
public boolean skipEventPublishing() {
return eventPublisher == null;
}

public Optional<? extends MongoPersistentEntity<?>> getEntity() {
return this.entity;
}
@SuppressWarnings("rawtypes")
public <T> T callback(Class<? extends EntityCallback> callbackType, T entity, String collectionName) {

public QueryMapper getQueryMapper() {
return this.queryMapper;
}

public UpdateMapper getUpdateMapper() {
return this.updateMapper;
}

public ApplicationEventPublisher getEventPublisher() {
return this.eventPublisher;
}

public EntityCallbacks getEntityCallbacks() {
return this.entityCallbacks;
}

@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;

BulkOperationContext that = (BulkOperationContext) o;

if (bulkMode != that.bulkMode)
return false;
if (!ObjectUtils.nullSafeEquals(this.entity, that.entity)) {
return false;
if (skipEntityCallbacks()) {
return entity;
}
if (!ObjectUtils.nullSafeEquals(this.queryMapper, that.queryMapper)) {
return false;
}
if (!ObjectUtils.nullSafeEquals(this.updateMapper, that.updateMapper)) {
return false;
}
if (!ObjectUtils.nullSafeEquals(this.eventPublisher, that.eventPublisher)) {
return false;
}
return ObjectUtils.nullSafeEquals(this.entityCallbacks, that.entityCallbacks);

return entityCallbacks.callback(callbackType, entity, collectionName);
}

@Override
public int hashCode() {
int result = bulkMode != null ? bulkMode.hashCode() : 0;
result = 31 * result + ObjectUtils.nullSafeHashCode(entity);
result = 31 * result + ObjectUtils.nullSafeHashCode(queryMapper);
result = 31 * result + ObjectUtils.nullSafeHashCode(updateMapper);
result = 31 * result + ObjectUtils.nullSafeHashCode(eventPublisher);
result = 31 * result + ObjectUtils.nullSafeHashCode(entityCallbacks);
return result;
@SuppressWarnings("rawtypes")
public <T> T callback(Class<? extends EntityCallback> callbackType, T entity, Document document,
String collectionName) {

if (skipEntityCallbacks()) {
return entity;
}

return entityCallbacks.callback(callbackType, entity, document, collectionName);
}

public String toString() {
return "DefaultBulkOperations.BulkOperationContext(bulkMode=" + this.getBulkMode() + ", entity="
+ this.getEntity() + ", queryMapper=" + this.getQueryMapper() + ", updateMapper=" + this.getUpdateMapper()
+ ", eventPublisher=" + this.getEventPublisher() + ", entityCallbacks=" + this.getEntityCallbacks() + ")";
public void publishEvent(ApplicationEvent event) {

if (skipEventPublishing()) {
return;
}

eventPublisher.publishEvent(event);
}
}

/**
* Value object chaining together an actual source with its {@link WriteModel} representation.
*
* @since 2.2
* @author Christoph Strobl
*/
private static final class SourceAwareWriteModelHolder {

private final Object source;
private final WriteModel<Document> model;

SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {

this.source = source;
this.model = model;
}

public Object getSource() {
return this.source;
}

public WriteModel<Document> getModel() {
return this.model;
}

@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;

SourceAwareWriteModelHolder that = (SourceAwareWriteModelHolder) o;

if (!ObjectUtils.nullSafeEquals(this.source, that.source)) {
return false;
}
return ObjectUtils.nullSafeEquals(this.model, that.model);
}

@Override
public int hashCode() {
int result = ObjectUtils.nullSafeHashCode(model);
result = 31 * result + ObjectUtils.nullSafeHashCode(source);
return result;
}

public String toString() {
return "DefaultBulkOperations.SourceAwareWriteModelHolder(source=" + this.getSource() + ", model="
+ this.getModel() + ")";
}
}
}

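Note (illustrative, not part of the diff): the DefaultBulkOperations changes above widen the update methods from Update to UpdateDefinition and route single updates through the shared update(query, update, upsert, multi) path. A minimal caller-side sketch, assuming a MongoTemplate named template and a Person entity class:

    BulkOperations ops = template.bulkOps(BulkOperations.BulkMode.ORDERED, Person.class);
    ops.updateOne(Query.query(Criteria.where("lastname").is("Doe")),
        new Update().set("firstname", "Jane"));   // Update implements UpdateDefinition
    BulkWriteResult result = ops.execute();
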
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2020 the original author or authors.
* Copyright 2011-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,6 +22,7 @@ import java.util.List;
import org.bson.Document;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexInfo;
@@ -29,6 +30,7 @@ import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.NumberUtils;

import com.mongodb.MongoException;
import com.mongodb.client.MongoCollection;
@@ -83,9 +85,9 @@ public class DefaultIndexOperations implements IndexOperations {
public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
@Nullable Class<?> type) {

Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!");
Assert.notNull(collectionName, "Collection name can not be null!");
Assert.notNull(queryMapper, "QueryMapper must not be null!");
Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null");
Assert.notNull(collectionName, "Collection name can not be null");
Assert.notNull(queryMapper, "QueryMapper must not be null");

this.collectionName = collectionName;
this.mapper = queryMapper;
@@ -103,8 +105,8 @@ public class DefaultIndexOperations implements IndexOperations {
*/
public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName, @Nullable Class<?> type) {

Assert.notNull(mongoOperations, "MongoOperations must not be null!");
Assert.hasText(collectionName, "Collection name must not be null or empty!");
Assert.notNull(mongoOperations, "MongoOperations must not be null");
Assert.hasText(collectionName, "Collection name must not be null or empty");

this.mongoOperations = mongoOperations;
this.mapper = new QueryMapper(mongoOperations.getConverter());
@@ -112,10 +114,6 @@ public class DefaultIndexOperations implements IndexOperations {
this.type = type;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.index.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)
*/
public String ensureIndex(final IndexDefinition indexDefinition) {

return execute(collection -> {
@@ -150,10 +148,6 @@ public class DefaultIndexOperations implements IndexOperations {
return null;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.index.IndexOperations#dropIndex(java.lang.String)
*/
public void dropIndex(final String name) {

execute(collection -> {
@@ -163,18 +157,24 @@ public class DefaultIndexOperations implements IndexOperations {

}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.index.IndexOperations#dropAllIndexes()
*/
@Override
public void alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) {

Document indexOptions = new Document("name", name);
indexOptions.putAll(options.toDocument());

Document result = mongoOperations
.execute(db -> db.runCommand(new Document("collMod", collectionName).append("index", indexOptions)));

if(NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) {
throw new UncategorizedMongoDbException("Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null);
}
}

public void dropAllIndexes() {
dropIndex("*");
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.index.IndexOperations#getIndexInfo()
*/
public List<IndexInfo> getIndexInfo() {

return execute(new CollectionCallback<List<IndexInfo>>() {
@@ -188,7 +188,8 @@ public class DefaultIndexOperations implements IndexOperations {

private List<IndexInfo> getIndexData(MongoCursor<Document> cursor) {

List<IndexInfo> indexInfoList = new ArrayList<>();
int available = cursor.available();
List<IndexInfo> indexInfoList = available > 0 ? new ArrayList<>(available) : new ArrayList<>();

while (cursor.hasNext()) {

@@ -205,7 +206,7 @@ public class DefaultIndexOperations implements IndexOperations {
@Nullable
public <T> T execute(CollectionCallback<T> callback) {

Assert.notNull(callback, "CollectionCallback must not be null!");
Assert.notNull(callback, "CollectionCallback must not be null");

if (type != null) {
return mongoOperations.execute(type, callback);

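Note (illustrative, not part of the diff): the new alterIndex method issues a MongoDB collMod command and checks the response's "ok" field, raising UncategorizedMongoDbException when it is not 1. The command document sent to the server has roughly the following shape (collection name, index name, and option values are placeholders; the concrete options come from IndexOptions.toDocument()):

    { "collMod": "<collectionName>", "index": { "name": "<indexName>", "expireAfterSeconds": 3600 } }
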
@@ -1,5 +1,5 @@
/*
* Copyright 2016-2020 the original author or authors.
* Copyright 2016-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -42,12 +42,8 @@ class DefaultIndexOperationsProvider implements IndexOperationsProvider {
this.mapper = mapper;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.index.IndexOperationsProvider#reactiveIndexOps(java.lang.String)
*/
@Override
public IndexOperations indexOps(String collectionName) {
return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper);
public IndexOperations indexOps(String collectionName, Class<?> type) {
return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper, type);
}
}

Some files were not shown because too many files have changed in this diff.