This commit is contained in:
Oleksandr Petrov 2012-01-25 21:56:09 +01:00
commit 9af8a13ea9
12 changed files with 419 additions and 64 deletions

View file

@ -1,6 +1,7 @@
# Monger
Monger is an idiomatic Clojure wrapper around MongoDB Java driver.
Monger is an idiomatic Clojure wrapper around the MongoDB Java driver. It offers a powerful, expressive query DSL, strives to support
every MongoDB 2.0+ feature, and is well maintained.
## Project Goals
@ -17,11 +18,18 @@ wanted a client that will
* Learn from other clients like the Java and Ruby ones.
* Target Clojure 1.3.0 and later from the ground up.
## Usage
We are working on documentation guides & examples site for the 1.0 release. Please refer to the test suite for code examples.
We are working on a documentation guides & examples site for the 1.0 release. In the meantime, please refer to the [test suite](https://github.com/michaelklishin/monger/tree/master/test/monger/test) for code examples.
Here is what monger.query DSL looks like right now:
## Powerful Query DSL
Every application that works with data stores has to query them. As a consequence, an expressive, powerful query DSL is a must
for client libraries like Monger.
Here is what the monger.query DSL feels like:
``` clojure
(with-collection "docs"
@ -34,12 +42,38 @@ Here is what monger.query DSL looks like right now:
(snapshot))
```
More code examples can be found in our test suite.
It is easy to add new DSL elements: for example, adding pagination took less than 10 lines of Clojure code. Here is what
it looks like:
``` clojure
(with-collection coll
  (find {})
  (paginate :page 1 :per-page 3)
  (sort { :title 1 })
  (read-preference ReadPreference/PRIMARY))
```
The query DSL supports composition, too:
``` clojure
(let [top3               (partial-query (limit 3))
      by-population-desc (partial-query (sort { :population -1 }))
      result             (with-collection coll
                           (find {})
                           (merge top3)
                           (merge by-population-desc))]
  ;; ...
  )
```
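Under the hood, partial queries are plain Clojure maps: `partial-query` simply threads the DSL forms through an empty map, so the `merge` calls above are just `clojure.core/merge` combining those maps into the main query.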
More code examples can be found [in our test suite](https://github.com/michaelklishin/monger/tree/master/test/monger/test).
## This is a Work In Progress
Core Monger APIs are stabilized but it is still a work in progress. Keep that in mind. 1.0 will be released in late 2011.
Core Monger APIs are stabilized but this is still a work in progress. Keep that in mind. 1.0 will be released in early 2012
together with documentation guides and a dedicated website.
## Artifacts
@ -48,7 +82,7 @@ Snapshot artifacts are [released to Clojars](https://clojars.org/com.novemberain
With Leiningen:
[com.novemberain/monger "0.11.0-SNAPSHOT"]
[com.novemberain/monger "1.0.0-SNAPSHOT"]
With Maven:
@ -56,7 +90,7 @@ With Maven:
<dependency>
<groupId>com.novemberain</groupId>
<artifactId>monger</artifactId>
<version>0.11.0-SNAPSHOT</version>
<version>1.0.0-SNAPSHOT</version>
</dependency>
@ -65,7 +99,7 @@ With Maven:
[![Continuous Integration status](https://secure.travis-ci.org/michaelklishin/monger.png)](http://travis-ci.org/michaelklishin/monger)
CI is hosted by [travis-ci.org](http://travis-ci.org)
CI is hosted by [travis-ci.org](http://travis-ci.org).

View file

@ -8,7 +8,7 @@
;; You must not remove this notice, or any other, from this software.
(ns monger.collection
(:refer-clojure :exclude [find remove count drop distinct])
(:refer-clojure :exclude [find remove count drop distinct empty?])
(:import [com.mongodb Mongo DB DBCollection WriteResult DBObject WriteConcern DBCursor MapReduceCommand MapReduceCommand$OutputType]
[java.util List Map]
[clojure.lang IPersistentMap ISeq])

View file

@ -22,9 +22,10 @@
;; THE SOFTWARE.
(ns monger.conversion
(:import (com.mongodb DBObject BasicDBObject BasicDBList DBCursor)
(clojure.lang IPersistentMap Keyword)
(java.util List Map)))
(:import [com.mongodb DBObject BasicDBObject BasicDBList DBCursor]
[clojure.lang IPersistentMap Keyword]
[java.util List Map Date]
[org.bson.types ObjectId]))
(defprotocol ConvertToDBObject
(to-db-object [input] "Converts given piece of Clojure data to BasicDBObject MongoDB Java driver uses"))
@ -101,3 +102,22 @@
(assoc m k (from-db-object v false))))
{} (reverse pairs)))
(defprotocol ConvertToObjectId
(to-object-id [input] "Instantiates ObjectId from input unless the input itself is an ObjectId instance. In that case, returns input as is."))
(extend-protocol ConvertToObjectId
String
(to-object-id [^String input]
(ObjectId. input))
Date
(to-object-id [^Date input]
(ObjectId. input))
ObjectId
(to-object-id [^ObjectId input]
input))
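;; A minimal usage sketch of the coercion above (the hex string is illustrative, not
;; taken from this commit):
;;   (to-object-id "4fb1a1b2c3d4e5f607182930") ;; from a 24-character hex string
;;   (to-object-id (java.util.Date.))          ;; from a java.util.Date
;;   (to-object-id (ObjectId.))                ;; an ObjectId is returned as is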

View file

@ -14,8 +14,9 @@
monger.core
(:refer-clojure :exclude [count])
(:use [monger.conversion])
(:import (com.mongodb Mongo DB WriteConcern DBObject DBCursor)
(java.util Map)))
(:import [com.mongodb Mongo DB WriteConcern DBObject DBCursor CommandResult]
[com.mongodb.gridfs GridFS]
[java.util Map]))
;;
;; Defaults
@ -28,6 +29,7 @@
(declare ^:dynamic ^DB *mongodb-database*)
(def ^:dynamic ^WriteConcern *mongodb-write-concern* WriteConcern/SAFE)
(declare ^:dynamic ^GridFS *mongodb-gridfs*)
;;
;; API
@ -75,33 +77,37 @@
`(binding [*mongodb-database* ~db]
(do ~@body)))
(defmacro with-gridfs
[fs & body]
`(binding [*mongodb-gridfs* ~fs]
(do ~@body)))
(defn connect!
"Connect to MongoDB, save connection to *mongodb-connection* dynamic variable"
"Connect to MongoDB, store connection in the *mongodb-connection* var"
^Mongo [& args]
(def ^:dynamic *mongodb-connection* (apply connect args)))
(defn set-db!
"Set dynamic *mongodb-database* variable to given :db"
"Sets *mongodb-database* var to given db, updates *mongodb-gridfs* var state. Recommended to be used for
applications that only use one database."
[db]
(def ^:dynamic *mongodb-database* db))
(def ^:dynamic *mongodb-database* db)
(def ^:dynamic *mongodb-gridfs* (GridFS. db)))
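;; Typical bootstrap sketch using these vars (assumes a default local connection;
;; the database name is illustrative):
;;   (monger.core/connect!)
;;   (monger.core/set-db! (.getDB monger.core/*mongodb-connection* "monger-dev"))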
(defn set-default-write-concern!
[wc]
"Set dynamic *mongodb-write-concert* to :wc
"Set *mongodb-write-concert* var to :wc
We recommend to use WriteConcern/SAFE by default to make sure your data was written."
Unlike the official Java driver, Monger uses WriteConcern/SAFE by default. We think defaults should be safe first
and WebScale fast second."
(def ^:dynamic *mongodb-write-concern* wc))
(defn command
"Available commands (please check MongoDB documentation for a complete list of commands for particular DB version.
Returns CommandResult.
Use (.ok result) to get response status.
It implements AbstractMap interface, so you can access it's internals:
(get (monger.core/command { :collstats \"things\") \"ns\")) ;; => monger-test.things
(defn ^CommandResult command
"Runs a database command (please check MongoDB documentation for the complete list of commands). Some common commands
are:
{ :buildinfo 1 } returns the version number and build information about the current MongoDB server; it should be executed via the admin DB.
@ -162,7 +168,7 @@
(extend-protocol Countable
DBCursor
(count [^com.mongodb.DBCursor this]
(count [^DBCursor this]
(.count this)))
(defn ^DBObject get-last-error
@ -184,5 +190,3 @@
(.getLastError ^DB database w wtimeout fsync))
([^DB database ^WriteConcern write-concern]
(.getLastError ^DB database write-concern)))

src/monger/gridfs.clj Normal file
View file

@ -0,0 +1,100 @@
(ns monger.gridfs
(:refer-clojure :exclude [remove find])
(:require [monger.core]
[clojure.java.io :as io])
(:use [monger.conversion])
(:import [com.mongodb DBObject]
[com.mongodb.gridfs GridFS GridFSInputFile]
[java.io InputStream File]))
;;
;; Implementation
;;
(def ^{:doc "Type object for a Java primitive byte array."
       :private true}
  byte-array-type (class (make-array Byte/TYPE 0)))
;; ...
;;
;; API
;;
(defn remove
([]
(remove {}))
([query]
(.remove ^GridFS monger.core/*mongodb-gridfs* ^DBObject (to-db-object query))))
(defn remove-all
[]
(remove {}))
(defn all-files
([]
(.getFileList ^GridFS monger.core/*mongodb-gridfs*))
([query]
(.getFileList ^GridFS monger.core/*mongodb-gridfs* query)))
(defprotocol GridFSInputFileFactory
(^GridFSInputFile make-input-file [input] "Makes GridFSInputFile out of given input"))
(extend byte-array-type
GridFSInputFileFactory
{ :make-input-file (fn [^bytes input]
(.createFile ^GridFS monger.core/*mongodb-gridfs* input)) })
(extend-protocol GridFSInputFileFactory
String
(make-input-file [^String input]
(.createFile ^GridFS monger.core/*mongodb-gridfs* ^InputStream (io/make-input-stream input { :encoding "UTF-8" })))
File
(make-input-file [^File input]
(.createFile ^GridFS monger.core/*mongodb-gridfs* ^InputStream (io/make-input-stream input { :encoding "UTF-8" })))
InputStream
(make-input-file [^InputStream input]
(.createFile ^GridFS monger.core/*mongodb-gridfs* ^InputStream input)))
(defmacro store
[^GridFSInputFile input & body]
`(let [^GridFSInputFile f# (doto ~input ~@body)]
(.save f# GridFS/DEFAULT_CHUNKSIZE)
(from-db-object f# true)))
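;; Usage sketch, mirroring the test suite (the path and filename are illustrative):
;;   (store (make-input-file "./test/resources/some-file.js")
;;     (.setFilename "example-file")
;;     (.setContentType "application/octet-stream"))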
(defprotocol Finders
(find [input] "Finds multiple files using given input (an ObjectId, filename or query)")
(find-one [input] "Finds one file using given input (an ObjectId, filename or query)"))
(extend-protocol Finders
String
(find [^String input]
(vec (.find ^GridFS monger.core/*mongodb-gridfs* input)))
(find-one [^String input]
(.findOne ^GridFS monger.core/*mongodb-gridfs* input))
org.bson.types.ObjectId
(find-one [^org.bson.types.ObjectId input]
(.findOne ^GridFS monger.core/*mongodb-gridfs* input))
DBObject
(find [^DBObject input]
(vec (.find ^GridFS monger.core/*mongodb-gridfs* input)))
(find-one [^DBObject input]
(.findOne ^GridFS monger.core/*mongodb-gridfs* input))
clojure.lang.PersistentArrayMap
(find [^clojure.lang.PersistentArrayMap input]
(find (to-db-object input))))
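;; Finder usage sketch (the filename and query values are illustrative):
;;   (find "example-file")                    ;; all files with this filename
;;   (find-one (to-db-object { :md5 "..." })) ;; first file matching a query document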

View file

@ -2,7 +2,7 @@
(:refer-clojure :exclude [select find sort])
(:require [monger.core]
[monger.internal pagination])
(:import [com.mongodb DB DBCollection DBObject DBCursor]
(:import [com.mongodb DB DBCollection DBObject DBCursor ReadPreference]
[java.util List])
(:use [monger conversion operators]))
@ -34,9 +34,8 @@
;; deleted during the query, it may or may not be returned, even with snapshot mode). Note that short query responses
;; (less than 1MB) are always effectively snapshotted. Currently, snapshot mode may not be used with sorting or explicit hints.
(defn empty-query
[^DBCollection coll]
([]
{
:collection coll
:query {}
:sort {}
:fields []
@ -46,22 +45,25 @@
:hint nil
:snapshot false
})
([^DBCollection coll]
(merge (empty-query) { :collection coll })))
(defn- fields-to-db-object
[^List fields]
(to-db-object (zipmap fields (repeat 1))))
(defn exec
[{ :keys [collection query fields skip limit sort batch-size hint snapshot] :or { limit 0 batch-size 256 skip 0 } }]
[{ :keys [collection query fields skip limit sort batch-size hint snapshot read-preference] :or { limit 0 batch-size 256 skip 0 } }]
(let [cursor (doto ^DBCursor (.find ^DBCollection collection (to-db-object query) (fields-to-db-object fields))
(.limit limit)
(.skip skip)
(.sort (to-db-object sort))
(.batchSize batch-size)
(.hint ^DBObject (to-db-object hint))
)]
(if snapshot
(.hint ^DBObject (to-db-object hint)))]
(when snapshot
(.snapshot cursor))
(when read-preference
(.setReadPreference cursor read-preference))
(map (fn [x] (from-db-object x true))
(seq cursor))))
@ -101,6 +103,10 @@
[m]
(merge m { :snapshot true }))
(defn read-preference
[m ^ReadPreference rp]
(merge m { :read-preference rp }))
(defn paginate
[m & { :keys [page per-page] :or { page 1 per-page 10 } }]
(merge m { :limit per-page :skip (monger.internal.pagination/offset-for page per-page) }))
@ -112,3 +118,7 @@
~coll)]
(let [query# (-> (empty-query *query-collection*) ~@body)]
(exec query#))))
(defmacro partial-query
[& body]
`(-> {} ~@body))
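;; Note: a partial query is just a map: (partial-query (limit 3)) expands to
;; (-> {} (limit 3)), producing a plain map (with a :limit key) that
;; with-collection can combine via merge.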

View file

@ -3,9 +3,7 @@
(ns monger.test.atomic-modifiers
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject CommandResult$CommandFailure]
[org.bson.types ObjectId]
[java.util Date]
)
[java.util Date])
(:require [monger core util]
[monger.collection :as mgcol]
[monger.result :as mgres]

View file

@ -130,9 +130,9 @@
(deftest find-one-full-document-when-collection-has-matches
(let [collection "docs"
doc-id (monger.util/random-uuid)
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }]
(mgcol/insert collection doc)
(def ^DBObject found-one (mgcol/find-one collection { :language "Clojure" }))
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
_ (mgcol/insert collection doc)
found-one (mgcol/find-one collection { :language "Clojure" })]
(is (= (:_id doc) (monger.util/get-id found-one)))
(is (= (mgcnv/from-db-object found-one true) doc))
(is (= (mgcnv/to-db-object doc) found-one))))
@ -150,9 +150,9 @@
(let [collection "docs"
doc-id (monger.util/random-uuid)
doc { :data-store "MongoDB", :language "Clojure", :_id doc-id }
fields [:language]]
(mgcol/insert collection doc)
(def ^DBObject loaded (mgcol/find-one collection { :language "Clojure" } fields))
fields [:language]
_ (mgcol/insert collection doc)
loaded (mgcol/find-one collection { :language "Clojure" } fields)]
(is (nil? (.get ^DBObject loaded "data-store")))
(is (= doc-id (monger.util/get-id loaded)))
(is (= "Clojure" (.get ^DBObject loaded "language")))))

View file

@ -2,7 +2,8 @@
(:require [monger core collection]
[monger.conversion :as cnv])
(:import [com.mongodb DBObject BasicDBObject BasicDBList]
[java.util Date Calendar List ArrayList])
[java.util Date Calendar List ArrayList]
[org.bson.types ObjectId])
(:use [clojure.test]))
@ -131,3 +132,13 @@
(is (= (-> output (get "nested") (get "list")) ["red" "green" "blue"]))
(is (= (-> output (get "nested") (get "dblist")) [0 1]))))
;;
;; ObjectId coercion
;;
(deftest test-conversion-to-object-id
(let [output (ObjectId. "4efb39370364238a81020502")]
(is (= output (cnv/to-object-id "4efb39370364238a81020502")))
(is (= output (cnv/to-object-id output)))))

test/monger/test/gridfs.clj Normal file
View file

@ -0,0 +1,130 @@
(ns monger.test.gridfs
(:refer-clojure :exclude [count remove find])
(:use [clojure.test]
[monger.core :only [count]]
[monger.test.fixtures]
[monger operators conversion]
[monger.gridfs :only (store make-input-file)])
(:require [monger.gridfs :as gridfs]
[monger.test.helper :as helper]
[clojure.java.io :as io])
(:import [java.io InputStream File FileInputStream]
[com.mongodb.gridfs GridFS GridFSInputFile GridFSDBFile]))
(defn purge-gridfs
[f]
(gridfs/remove-all)
(f)
(gridfs/remove-all))
(use-fixtures :each purge-gridfs)
(helper/connect!)
(deftest test-storing-files-to-gridfs-using-relative-fs-paths
(let [input "./test/resources/mongo/js/mapfun1.js"]
(is (= 0 (count (gridfs/all-files))))
(store (make-input-file input)
(.setFilename "monger.test.gridfs.file1")
(.setContentType "application/octet-stream"))
(is (= 1 (count (gridfs/all-files))))))
(deftest test-storing-files-to-gridfs-using-file-instances
(let [input (io/as-file "./test/resources/mongo/js/mapfun1.js")]
(is (= 0 (count (gridfs/all-files))))
(store (make-input-file input)
(.setFilename "monger.test.gridfs.file2")
(.setContentType "application/octet-stream"))
(is (= 1 (count (gridfs/all-files))))))
(deftest test-storing-bytes-to-gridfs
(let [input (.getBytes "A string")]
(is (= 0 (count (gridfs/all-files))))
(store (make-input-file input)
(.setFilename "monger.test.gridfs.file3")
(.setContentType "application/octet-stream"))
(is (= 1 (count (gridfs/all-files))))))
(deftest test-storing-files-to-gridfs-using-absolute-fs-paths
(let [tmp-file (File/createTempFile "monger.test.gridfs" "test-storing-files-to-gridfs-using-absolute-fs-paths")
_ (spit tmp-file "Some content")
input (.getAbsolutePath tmp-file)]
(is (= 0 (count (gridfs/all-files))))
(store (make-input-file input)
(.setFilename "monger.test.gridfs.file4")
(.setContentType "application/octet-stream"))
(is (= 1 (count (gridfs/all-files))))))
(deftest test-storing-files-to-gridfs-using-input-stream
(let [tmp-file (File/createTempFile "monger.test.gridfs" "test-storing-files-to-gridfs-using-input-stream")
_ (spit tmp-file "Some other content")]
(is (= 0 (count (gridfs/all-files))))
(store (make-input-file (FileInputStream. tmp-file))
(.setFilename "monger.test.gridfs.file4b")
(.setContentType "application/octet-stream"))
(is (= 1 (count (gridfs/all-files))))))
(deftest test-finding-individual-files-on-gridfs
(let [input "./test/resources/mongo/js/mapfun1.js"
ct "binary/octet-stream"
filename "monger.test.gridfs.file5"
md5 "14a09deabb50925a3381315149017bbd"
stored (store (make-input-file input)
(.setFilename filename)
(.setContentType ct))]
(is (= 1 (count (gridfs/all-files))))
(is (:_id stored))
(is (:uploadDate stored))
(is (= 62 (:length stored)))
(is (= md5 (:md5 stored)))
(is (= filename (:filename stored)))
(is (= ct (:contentType stored)))
(are [a b] (is (= a (:md5 (from-db-object (gridfs/find-one b) true))))
md5 (:_id stored)
md5 filename
md5 (to-db-object { :md5 md5 }))))
(deftest test-finding-multiple-files-on-gridfs
(let [input "./test/resources/mongo/js/mapfun1.js"
ct "binary/octet-stream"
md5 "14a09deabb50925a3381315149017bbd"
stored1 (store (make-input-file input)
(.setFilename "monger.test.gridfs.file6")
(.setContentType ct))
stored2 (store (make-input-file input)
(.setFilename "monger.test.gridfs.file7")
(.setContentType ct))
list1 (gridfs/find "monger.test.gridfs.file6")
list2 (gridfs/find "monger.test.gridfs.file7")
list3 (gridfs/find "888000___.monger.test.gridfs.file")
list4 (gridfs/find { :md5 md5 })]
(is (= 2 (count (gridfs/all-files))))
(are [a b] (is (= (map #(.get ^GridFSDBFile % "_id") a)
(map :_id b)))
list1 [stored1]
list2 [stored2]
list3 []
list4 [stored1 stored2])))
(deftest test-removing-multiple-files-from-gridfs
(let [input "./test/resources/mongo/js/mapfun1.js"
ct "binary/octet-stream"
md5 "14a09deabb50925a3381315149017bbd"
stored1 (store (make-input-file input)
(.setFilename "monger.test.gridfs.file8")
(.setContentType ct))
stored2 (store (make-input-file input)
(.setFilename "monger.test.gridfs.file9")
(.setContentType ct))]
(is (= 2 (count (gridfs/all-files))))
(gridfs/remove { :filename "monger.test.gridfs.file8" })
(is (= 1 (count (gridfs/all-files))))
(gridfs/remove { :md5 md5 })
(is (= 0 (count (gridfs/all-files))))))

View file

@ -6,7 +6,8 @@
(:import [org.joda.time DateTime ReadableInstant]
[org.joda.time.format ISODateTimeFormat]
[java.io StringWriter PrintWriter]
[org.bson.types ObjectId])
[org.bson.types ObjectId]
[com.mongodb DBObject])
(:require [clojure.data.json :as json]
[clj-time.core :as t]))

View file

@ -2,7 +2,7 @@
(ns monger.test.querying
(:refer-clojure :exclude [select find sort])
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject CommandResult$CommandFailure]
(:import [com.mongodb WriteResult WriteConcern DBCursor DBObject CommandResult$CommandFailure ReadPreference]
[org.bson.types ObjectId]
[java.util Date])
(:require [monger core util]
@ -11,7 +11,8 @@
[monger.test.helper :as helper])
(:use [clojure.test]
[monger.test.fixtures]
[monger conversion query operators]))
[monger conversion query operators joda-time]
[clj-time.core :only [date-time]]))
(helper/connect!)
@ -77,7 +78,7 @@
;; < ($lt), <= ($lte), > ($gt), >= ($gte)
(deftest query-using-dsl-and-$lt-operator
(deftest query-using-dsl-and-$lt-operator-with-integers
(let [coll "docs"
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year 2006 }
doc2 { :language "Java" :_id (ObjectId.) :inception_year 1992 }
@ -86,7 +87,21 @@
lt-result (with-collection "docs"
(find { :inception_year { $lt 2000 } })
(limit 2))]
(is (= [doc2] lt-result))))
(is (= [doc2] (vec lt-result)))))
(deftest query-using-dsl-and-$lt-operator-with-dates
(let [coll "docs"
;; these rely on monger.joda-time being loaded. MK.
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year (date-time 2006 1 1) }
doc2 { :language "Java" :_id (ObjectId.) :inception_year (date-time 1992 1 2) }
doc3 { :language "Scala" :_id (ObjectId.) :inception_year (date-time 2003 3 3) }
_ (mgcol/insert-batch coll [doc1 doc2])
lt-result (with-collection "docs"
(find { :inception_year { $lt (date-time 2000 1 2) } })
(limit 2))]
(is (= (map :_id [doc2])
(map :_id (vec lt-result))))))
@ -110,6 +125,20 @@
(find { :inception_year { "$gte" 2006 } }))))))
(deftest query-using-$gt-$lt-$gte-$lte-operators-using-dsl-composition
(let [coll "docs"
doc1 { :language "Clojure" :_id (ObjectId.) :inception_year 2006 }
doc2 { :language "Java" :_id (ObjectId.) :inception_year 1992 }
doc3 { :language "Scala" :_id (ObjectId.) :inception_year 2003 }
srt (-> {}
(limit 1)
(sort { :inception_year -1 }))
_ (mgcol/insert-batch coll [doc1 doc2 doc3])]
(is (= [doc1] (with-collection coll
(find { :inception_year { "$gt" 2002 } })
(merge srt))))))
;; $all
(deftest query-with-using-$all
@ -189,7 +218,8 @@
result1 (with-collection coll
(find {})
(paginate :page 1 :per-page 3)
(sort { :title 1 }))
(sort { :title 1 })
(read-preference ReadPreference/PRIMARY))
result2 (with-collection coll
(find {})
(paginate :page 2 :per-page 3)
@ -201,3 +231,20 @@
(is (= [doc1 doc5 doc7] result1))
(is (= [doc2 doc6 doc4] result2))
(is (= [doc3] result3))))
(deftest combined-querying-dsl-example1
(let [coll "docs"
ma-doc { :_id (ObjectId.) :name "Massachusetts" :iso "MA" :population 6547629 :joined_in 1788 :capital "Boston" }
de-doc { :_id (ObjectId.) :name "Delaware" :iso "DE" :population 897934 :joined_in 1787 :capital "Dover" }
ny-doc { :_id (ObjectId.) :name "New York" :iso "NY" :population 19378102 :joined_in 1788 :capital "Albany" }
ca-doc { :_id (ObjectId.) :name "California" :iso "CA" :population 37253956 :joined_in 1850 :capital "Sacramento" }
tx-doc { :_id (ObjectId.) :name "Texas" :iso "TX" :population 25145561 :joined_in 1845 :capital "Austin" }
top3 (partial-query (limit 3))
by-population-desc (partial-query (sort { :population -1 }))
_ (mgcol/insert-batch coll [ma-doc de-doc ny-doc ca-doc tx-doc])
result (with-collection coll
(find {})
(merge top3)
(merge by-population-desc))]
(is (= result [ca-doc tx-doc ny-doc]))))