Skip to content
GitLab
Projects
Groups
Snippets
Help
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Sign in
Toggle navigation
Open sidebar
Sascha Herzinger
ada-server
Commits
18dc5942
Commit
18dc5942
authored
Jan 24, 2020
by
Peter Banda
Browse files
Size limits for Mongo capped stream repo increased
parent
36f23caf
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
5 additions
and
2 deletions
+5
-2
src/main/scala/org/ada/server/dataaccess/mongo/MongoAsyncRepo.scala
...cala/org/ada/server/dataaccess/mongo/MongoAsyncRepo.scala
+5
-2
No files found.
src/main/scala/org/ada/server/dataaccess/mongo/MongoAsyncRepo.scala
View file @
18dc5942
...
...
@@ -194,6 +194,9 @@ class MongoAsyncStreamRepo[E: Format, ID: Format](
// Akka stream materializer, field-injected (presumably by Guice, given the
// @Inject-on-var pattern) — hence the `var` with the `_` default value.
@Inject implicit var materializer: Materializer = _

// Limits for the capped Mongo collection backing this stream repo:
// maximum size in bytes and maximum number of documents.
private val maxSize = 1024000
private val maxDocsSize = 10000

// Stream of documents built from `akkaCursor`; the future cursor is adapted
// into a Source via AkkaStreamUtil.fromFutureSource.
override lazy val stream: Source[E, NotUsed] =
  AkkaStreamUtil.fromFutureSource(akkaCursor.map(_.documentSource()))
...
...
@@ -219,12 +222,12 @@ class MongoAsyncStreamRepo[E: Format, ID: Format](
collection
.
stats
().
flatMap
{
case
stats
if
!
stats
.
capped
=>
// The collection is not capped, so we convert it
collection
.
convertToCapped
(
102400
,
Some
(
1000
))
collection
.
convertToCapped
(
maxSize
,
Some
(
maxDocsSize
))
case
_
=>
Future
(
collection
)
}.
recover
{
// The collection mustn't exist, create it
case
_
=>
collection
.
createCapped
(
102400
,
Some
(
1000
))
collection
.
createCapped
(
maxSize
,
Some
(
maxDocsSize
))
}.
flatMap
(
_
=>
if
(
timestampFieldName
.
isDefined
)
{
collection
.
indexesManager
.
ensure
(
Index
(
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment