@pagetronic
Created May 18, 2018 06:02
Aggregator examples for MongoDB
package live.page.base.db;

import com.mongodb.client.model.Accumulators;
import com.mongodb.client.model.BsonField;
import live.page.base.utils.Json;

import java.util.ArrayList;
import java.util.List;

/**
 * Helper that carries a fixed set of document keys through successive
 * $group / $project stages of an aggregation pipeline.
 */
public class Aggregator extends ArrayList<BsonField> {

    private final String[] keys;

    public Aggregator(String... keys) {
        this.keys = keys;
    }

    /**
     * Accumulators for a $group stage: the explicit accumulators passed in,
     * plus a $first accumulator for every registered key not already covered.
     */
    public List<BsonField> getGrouper(BsonField... groups) {
        List<BsonField> grouper = new ArrayList<>();
        List<String> addkeys = new ArrayList<>();
        for (BsonField group : groups) {
            grouper.add(group);
            addkeys.add(group.getName());
        }
        for (String key : keys) {
            if (!addkeys.contains(key)) {
                grouper.add(Accumulators.first(key, "$" + key));
            }
        }
        return grouper;
    }

    /**
     * Projection that keeps every registered key as-is.
     */
    public Json getProjection() {
        Json projection = new Json();
        for (String key : keys) {
            projection.put(key, true);
        }
        return projection;
    }

    /**
     * Projection that re-emits every registered key by value, preserving order.
     */
    public Json getProjectionOrder() {
        Json projection = new Json("_id", "$_id");
        for (String key : keys) {
            projection.put(key, "$" + key);
        }
        return projection;
    }
}
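A minimal sketch of how the helper is meant to be used, following the pattern of the classes below; the collection and key names here are hypothetical, and Json / Db are the gist's own wrappers:

// Illustrative sketch only -- not part of the gist.
public static Json exampleUsage() {
    Aggregator grouper = new Aggregator("title", "url", "date");
    List<Bson> pipeline = new ArrayList<>();
    pipeline.add(Aggregates.match(Filters.eq("url", "some-url")));
    // $group: explicit accumulators first; every other registered key is kept with $first
    pipeline.add(Aggregates.group("$_id", grouper.getGrouper(
            Accumulators.push("urls", "$breadcrumb.url")
    )));
    // $project: all registered keys, plus whatever extra field this stage introduces
    pipeline.add(Aggregates.project(grouper.getProjection().put("urls", true)));
    return Db.aggregate("Pages", pipeline).first();
}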
package live.page.base.forum;
import com.mongodb.client.model.Accumulators;
import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.GraphLookupOptions;
import com.mongodb.client.model.Sorts;
import com.mongodb.client.model.UnwindOptions;
import live.page.base.db.Aggregator;
import live.page.base.db.Db;
import live.page.base.session.Users;
import live.page.base.utils.Json;
import live.page.base.utils.Settings;
import org.bson.BsonUndefined;
import org.bson.conversions.Bson;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class ForumsAggregator {
public static Json getForumData(String requestURI, String next_str, Users user, boolean remove) {
String[] urls = requestURI.split("/");
// split() never produces null entries; guard against an empty result before indexing
if (urls.length == 0) {
return null;
}
String url = urls[urls.length - 1];
Json rez = new Json();
Json forum = ForumsAggregator.getForum(Filters.eq("url", url), user);
if (forum == null) {
return null;
}
Json posts = PostsAggregator.getPosts(forum.getList("branche") != null ? Filters.in("parents", forum.getList("branche")) : null, 40, next_str, remove);
forum.remove("branche");
rez.put("forum", forum).put("posts", posts);
return rez;
}
public static Json getForum(Bson filter, Users user) {
Aggregator grouper = new Aggregator("initid", "title", "text", "parents",
"url", "breadcrumb", "childrens", "sisters", "parents", "pages", "urls"
);
List<Bson> pipeline = new ArrayList<>();
pipeline.add(Aggregates.match(filter));
pipeline.add(Aggregates.limit(1));
pipeline.add(Aggregates.graphLookup("Forums", new Json("$arrayElemAt", Arrays.asList("$parents", 0)), "parents.0", "_id", "breadcrumb", new GraphLookupOptions().depthField("depth").maxDepth(50)));
//**** breadcrumb
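// Breadcrumb: $graphLookup walks the ancestor chain (parents.0 -> _id) from the first parent
// up to the root; each crumb's own chain is resolved the same way so a $reduce can concatenate
// its url segments into a full "/a/b/c" path, and the crumbs are re-assembled root-first by
// sorting on the graphLookup depth field before the final $push.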
pipeline.add(Aggregates.unwind("$breadcrumb", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.graphLookup("Forums", "$breadcrumb._id", "parents.0", "_id", "breadcrumb.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$breadcrumb.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("breadcrumb.parents.depth", -1)));
pipeline.add(Aggregates.group(new Json("breadcrumb_id", "$breadcrumb._id").put("_id", "$_id"),
grouper.getGrouper(
Accumulators.first("initid", "$_id"),
Accumulators.first("order_", "$order"),
Accumulators.first("url", new Json("$arrayElemAt", Arrays.asList("$url", 0))),
Accumulators.push("urls", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.parents.url", 0)))
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("order_", true)
.put("breadcrumb", new Json("_id", true).put("title", true).put("id", "$breadcrumb._id").put("title", "$breadcrumb.title")
.put("depth", true).put("urlinit", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.url", 0)))
.put("url", new Json("$reduce", new Json("input", "$urls").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))))
)
));
pipeline.add(Aggregates.sort(new Json("breadcrumb.depth", -1)));
pipeline.add(Aggregates.group("$initid",
grouper.getGrouper(
Accumulators.first("order_", "$order_"),
Accumulators.push("urls", "$breadcrumb.urlinit"),
Accumulators.push("breadcrumb", "$breadcrumb")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("order_", true).put("initid", "$_id")
.put("url",
new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList(new Json("$arrayElemAt", Arrays.asList("$urls", 0)), null)),
new Json("$concat", Arrays.asList("/", "$url")),
new Json("$concat", Arrays.asList(new Json("$reduce", new Json("input", "$urls").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))), "/", "$url"))
))
)
.put("breadcrumb", new Json("title", true).put("url", true).put("id", true))
));
// breadcrumb ****/
//**** childrens
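// Children: a reverse $lookup finds forums whose "parents" contains this _id, each child's
// ancestor chain builds its url, the list is ordered with $indexOfArray against the forum's
// stored "order" array, and the empty placeholder left by preserveNullAndEmptyArrays is
// filtered out at the end.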
pipeline.add(Aggregates.lookup("Forums", "_id", "parents", "childrens"));
pipeline.add(Aggregates.unwind("$childrens", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.graphLookup("Forums", "$childrens._id", "parents.0", "_id", "childrens.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$childrens.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("childrens.parents.depth", -1)));
pipeline.add(Aggregates.group(new Json("childrens_id", "$childrens._id").put("_id", "$_id"),
grouper.getGrouper(
Accumulators.first("order_", "$order_"),
Accumulators.push("urls", new Json("$arrayElemAt", Arrays.asList("$childrens.parents.url", 0)))
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("order", new Json("$indexOfArray", Arrays.asList("$order_", "$childrens._id")))
.put("childrens", new Json("_id", true).put("id", "$childrens._id").put("title", "$childrens.title")
.put("url", new Json("$reduce", new Json("input", "$urls").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))))
)
));
pipeline.add(Aggregates.sort(Sorts.ascending("order")));
pipeline.add(Aggregates.group("$initid",
grouper.getGrouper(
Accumulators.push("childrens", "$childrens")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("childrens",
new Json("$filter", new Json("input", "$childrens").put("as", "childrens").put("cond", new Json("$ne", Arrays.asList("$$childrens._id", new BsonUndefined()))))
)
));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("childrens", new Json("title", true).put("url", true).put("id", true))
));
// childrens ****/
// **** sisters
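// Sisters: forums sharing at least one parent with the current forum ($lookup "parents" ->
// "parents"), sorted by date descending; the current forum itself and undefined placeholders
// are filtered out.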
pipeline.add(Aggregates.lookup("Forums", "parents", "parents", "sisters"));
pipeline.add(Aggregates.unwind("$sisters", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.graphLookup("Forums", "$sisters._id", "parents.0", "_id", "sisters.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$sisters.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("sisters.parents.depth", -1)));
pipeline.add(Aggregates.group(new Json("sisters_id", "$sisters._id").put("_id", "$_id"),
grouper.getGrouper(
Accumulators.push("urls", new Json("$arrayElemAt", Arrays.asList("$sisters.parents.url", 0)))
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("sisters", new Json("_id", true).put("date", true).put("id", "$sisters._id").put("title", "$sisters.title")
.put("url", new Json("$reduce", new Json("input", "$urls").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))))
)
));
pipeline.add(Aggregates.sort(new Json("sisters.date", -1)));
pipeline.add(Aggregates.group("$initid", grouper.getGrouper(
Accumulators.push("sisters", "$sisters")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("sisters",
new Json("$filter", new Json("input", "$sisters").put("as", "sisters").put("cond",
new Json("$and", Arrays.asList(
new Json("$ne", Arrays.asList("$$sisters._id", "$initid")),
new Json("$ne", Arrays.asList("$$sisters._id", new BsonUndefined()))
))
))
)
));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("sisters", new Json("title", true).put("url", true).put("id", true))
.put("parents_order", "$parents")
));
// sisters ****/
// **** parents
pipeline.add(Aggregates.lookup("Forums", "parents", "_id", "parents"));
pipeline.add(Aggregates.unwind("$parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.graphLookup("Forums", "$parents._id", "parents.0", "_id", "parents.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$parents.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("parents.parents.depth", -1)));
pipeline.add(Aggregates.group(new Json("parents_id", "$parents._id").put("_id", "$_id"),
grouper.getGrouper(
Accumulators.first("parents_order", "$parents_order"),
Accumulators.push("urls", new Json("$arrayElemAt", Arrays.asList("$parents.parents.url", 0)))
)
));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("pos", new Json("$indexOfArray", Arrays.asList("$parents_order", "$parents._id")))
.put("parents", new Json("_id", true).put("date", true).put("id", "$parents._id").put("title", "$parents.title")
.put("url", new Json("$reduce", new Json("input", "$urls").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))))
)
));
pipeline.add(Aggregates.sort(Sorts.ascending("pos")));
pipeline.add(Aggregates.group("$initid", grouper.getGrouper(
Accumulators.first("page_order", "$pages"),
Accumulators.push("parents", "$parents")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("page_order", true)
.put("parents", new Json("title", true).put("url", true).put("id", true))
));
// parents ****/
// **** pages
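// Pages: documents in the Pages collection whose "forums" array references this forum,
// joined with BlobFiles so the first image document becomes the page logo, ordered by the
// forum's own "pages" array and stripped of undefined placeholders.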
pipeline.add(Aggregates.lookup("Pages", "initid", "forums", "pages"));
pipeline.add(Aggregates.unwind("$pages", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.unwind("$pages.docs", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("pos_doc")));
pipeline.add(Aggregates.lookup("BlobFiles", "pages.docs", "_id", "pages.docs"));
pipeline.add(Aggregates.unwind("$pages.docs", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(Sorts.ascending("pos_doc")));
pipeline.add(Aggregates.group("$pages._id", grouper.getGrouper(
Accumulators.first("page_order", "$page_order"),
Accumulators.push("temp_doc", "$pages.docs"))));
pipeline.add(Aggregates.graphLookup("Pages", "$pages._id", "parents.0", "_id", "pages.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$pages.parents", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("depth")));
pipeline.add(Aggregates.sort(new Json("pages.parents.depth", -1)));
pipeline.add(Aggregates.group("$pages._id", grouper.getGrouper(
Accumulators.first("page_order", "$page_order"),
Accumulators.first("temp_doc", "$temp_doc"),
Accumulators.push("urls_pages", "$pages.parents.url")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("page_order", true)
.put("urls_pages", true)
.put("temp_doc",
new Json("$arrayElemAt", Arrays.asList(
new Json("$filter", new Json("input", "$temp_doc").put("as", "temp_doc").put("cond", new Json("$eq", Arrays.asList(new Json("$substr", Arrays.asList("$$temp_doc.type", 0, "image/".length())), "image/"))))
, 0))
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("pages", new Json("id", "$pages._id")
.put("order", new Json("$indexOfArray", Arrays.asList("$page_order", "$pages._id")))
.put("title", "$pages.title")
.put("intro", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$pages.intro", "")), null, "$pages.intro")))
.put("logo", new Json("$concat", Arrays.asList(Settings.getCDNHttp() + "/files/", "$temp_doc._id")))
.put("url", new Json("$reduce", new Json("input", "$urls_pages").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this"))))))
));
pipeline.add(Aggregates.sort(Sorts.ascending("pages.order")));
pipeline.add(Aggregates.group("$initid", grouper.getGrouper(
Accumulators.push("pages", "$pages")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("pages",
new Json("$filter", new Json("input", "$pages").put("as", "pages").put("cond", new Json("$ne", Arrays.asList("$$pages.id", new BsonUndefined()))))
)
));
// pages ****/
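// "branche": the whole descendant subtree of this forum ($graphLookup _id -> parents),
// rewritten as "Forums(<id>)" references; getForumData later uses it to fetch every post
// attached to this forum or to any of its descendants.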
pipeline.add(Aggregates.graphLookup("Forums", "$_id", "_id", "parents", "branche", new GraphLookupOptions().maxDepth(1000)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("pages", new Json("id", true)
.put("title", true)
.put("intro", true)
.put("logo", true)
.put("url", true)
)
.put("branche", new Json("$concatArrays", Arrays.asList("$branche", Arrays.asList(new Json("_id", "$initid")))))
));
pipeline.add(Aggregates.unwind("$branche", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("branche", new Json("$concat", Arrays.asList("Forums(", "$branche._id", ")")))
.put("_id", "$_id")
.put("breadcrumb", new Json("$filter", new Json("input", "$breadcrumb").put("as", "breadcrumb").put("cond",
new Json("$ne", Arrays.asList("$$breadcrumb.id", new BsonUndefined()))
)))
));
pipeline.add(Aggregates.group("$_id", grouper.getGrouper(
Accumulators.push("branche", "$branche")
)));
pipeline.add(Aggregates.project(new Json("_id", false)
.put("id", "$_id")
.put("title", "$title").put("text", "$text").put("url", "$url").put("follow", "$follow")
.put("breadcrumb", "$breadcrumb")
.put("parents", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$_id", "ROOT")), new ArrayList<>(), "$parents")))
.put("childrens", "$childrens")
.put("sisters", "$sisters")
.put("pages", "$pages")
.put("branche", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$_id", "ROOT")), null, "$branche")))
));
return Db.aggregate("Forums", pipeline).first();
}
public static List<Json> sitemap() {
List<Bson> pipeline = new ArrayList<>();
pipeline.add(Aggregates.sort(Sorts.orderBy(Sorts.descending("date"), Sorts.ascending("_id"))));
pipeline.add(Aggregates.limit(10000));
pipeline.add(Aggregates.graphLookup("Forums", "$_id", "parents.0", "_id", "breadcrumb", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$breadcrumb", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("breadcrumb.depth", -1)));
pipeline.add(Aggregates.group("$_id", Arrays.asList(
Accumulators.push("urls", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.url", 0)))
)));
pipeline.add(Aggregates.project(new Json("loc", new Json("$concat", Arrays.asList(Settings.getFullHttp(),
new Json("$reduce", new Json("input", "$urls").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))))
))
));
return Db.aggregate("Forums", pipeline).into(new ArrayList<>());
}
}
package live.page.base.pages;
import com.mongodb.client.model.Accumulators;
import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.GraphLookupOptions;
import com.mongodb.client.model.Sorts;
import com.mongodb.client.model.UnwindOptions;
import live.page.base.db.Aggregator;
import live.page.base.db.Db;
import live.page.base.db.Paginer;
import live.page.base.utils.Json;
import live.page.base.utils.Settings;
import live.page.base.forum.PostsAggregator;
import org.bson.BsonUndefined;
import org.bson.conversions.Bson;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class PagesAggregator {
public static Json getPage(Bson filter) {
Aggregator grouper = new Aggregator(
"initid", "id", "users", "title", "top_title", "intro", "text",
"docs", "logo", "date", "update", "url", "breadcrumb", "parents", "forums",
"childrens", "sisters", "links", "depth", "position", "urls", "temp_doc"
);
List<Bson> pipeline = new ArrayList<>();
pipeline.add(Aggregates.match(filter));
pipeline.add(Aggregates.limit(1));
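// Attached documents: unwind "docs" keeping the original position (pos_doc), resolve each id
// against BlobFiles, build a CDN url per file, then re-sort on pos_doc and regroup so the
// array keeps its original order.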
pipeline.add(Aggregates.unwind("$docs", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("pos_doc")));
pipeline.add(Aggregates.lookup("BlobFiles", "docs", "_id", "docs"));
pipeline.add(Aggregates.unwind("$docs", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("docs", new Json("_id", true).put("type", true).put("size", true).put("text", true).put("url", new Json("$concat", Arrays.asList(Settings.getCDNHttp() + "/files/", "$docs._id")))))
);
pipeline.add(Aggregates.sort(Sorts.ascending("pos_doc")));
pipeline.add(Aggregates.group("$_id", grouper.getGrouper(Accumulators.push("docs", "$docs"))));
pipeline.add(Aggregates.graphLookup("Pages", new Json("$arrayElemAt", Arrays.asList("$parents", 0)), "parents.0", "_id", "breadcrumb", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$breadcrumb", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.graphLookup("Pages", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.parents", 0)), "parents.0", "_id", "breadcrumb.breadcrumb", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.sort(new Json("breadcrumb.depth", -1)));
pipeline.add(Aggregates.group("$_id",
grouper.getGrouper(
Accumulators.push("breadcrumb", "$breadcrumb"),
Accumulators.push("urls", "$breadcrumb.url")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("logo", new Json("$arrayElemAt", Arrays.asList(
new Json("$filter", new Json("input", "$docs").put("as", "logo").put("cond", new Json("$eq", Arrays.asList(new Json("$substr", Arrays.asList("$$logo.type", 0, "image/".length())), "image/"))))
, 0)))
.put("links", new Json("$split", Arrays.asList("$text", "Pages(")))
.put("urls", new Json("$cond", Arrays.asList(new Json("$gt", Arrays.asList(new Json("$size", "$urls"), 0)), new Json("$slice", Arrays.asList("$urls", 1, 100)), "$urls")))
)
);
pipeline.add(Aggregates.project(grouper.getProjection()
.put("logo", new Json("$concat", Arrays.asList(Settings.getCDNHttp() + "/files/", "$logo._id")))
.put("links", new Json("$slice", Arrays.asList("$links", 1, 100)))));
pipeline.add(Aggregates.unwind("$links", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.project(grouper.getProjection().put("links", new Json("$substrCP", Arrays.asList("$links", 0, Db.DB_KEY_LENGTH)))));
pipeline.add(Aggregates.group("$_id", grouper.getGrouper(
Accumulators.first("parents_", "$parents"),
Accumulators.first("initid", "$_id"),
Accumulators.push("links", "$links")
)));
//parents
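// Parent pages: resolved via $lookup on "parents"; each parent's ancestor chain is walked with
// $graphLookup to build its url, its first image document becomes its logo, and the list is
// re-ordered with $indexOfArray against the page's original "parents" array.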
pipeline.add(Aggregates.lookup("Pages", "parents", "_id", "parents"));
pipeline.add(Aggregates.unwind("$parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.unwind("$parents.docs", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("pos_doc")));
pipeline.add(Aggregates.lookup("BlobFiles", "parents.docs", "_id", "parents.docs"));
pipeline.add(Aggregates.unwind("$parents.docs", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(Sorts.ascending("pos_doc")));
pipeline.add(Aggregates.group("$parents._id", grouper.getGrouper(
Accumulators.push("temp_doc", "$parents.docs"),
Accumulators.first("parents_", "$parents_")
)));
pipeline.add(Aggregates.graphLookup("Pages", new Json("$arrayElemAt", Arrays.asList("$parents.parents", 0)), "parents.0", "_id", "parents.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$parents.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("parents.parents.depth", -1)));
pipeline.add(Aggregates.group(new Json("parents_id", "$parents._id").put("initid", "$initid"),
grouper.getGrouper(
Accumulators.first("parents_", "$parents_"),
Accumulators.first("pos", "$pos"),
Accumulators.first("parents_url", "$parents.url"),
Accumulators.push("urls_", "$parents.parents.url")
)
));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("temp_doc",
new Json("$arrayElemAt", Arrays.asList(
new Json("$filter", new Json("input", "$temp_doc").put("as", "logo").put("cond", new Json("$eq", Arrays.asList(new Json("$substr", Arrays.asList("$$logo.type", 0, "image/".length())), "image/"))))
, 0))
)
.put("pos", new Json("$indexOfArray", Arrays.asList("$parents_", "$parents._id")))
.put("urls_", new Json("$cond", Arrays.asList(new Json("$gt", Arrays.asList(new Json("$size", "$urls_"), 0)), new Json("$slice", Arrays.asList("$urls_", 1, 100)), "$urls_")))
.put("parents_url", true)));
pipeline.add(Aggregates.project(grouper.getProjection().put("pos", true)
.put("breadcrumb", true).put("parents", true).put("childrens", true).put("sisters", true).put("links", true).put("urls", true)
.put("parents", new Json().put("_id", "$parents._id").put("title", "$parents.title").put("intro", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$parents.intro", "")), null, "$parents.intro")))
.put("logo", new Json("$concat", Arrays.asList(Settings.getCDNHttp() + "/files/", "$temp_doc._id")))
.put("url", new Json("$concat", Arrays.asList(new Json("$reduce", new Json("input", "$urls_").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))), "/", "$parents_url"))))
));
pipeline.add(Aggregates.sort(new Json("pos", 1)));
pipeline.add(Aggregates.group("$initid",
grouper.getGrouper(
Accumulators.first("childrens_", "$childrens"),
Accumulators.push("parents", "$parents")
)));
//childrens
pipeline.add(Aggregates.lookup("Pages", "_id", "parents", "childrens"));
pipeline.add(Aggregates.unwind("$childrens", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.unwind("$childrens.docs", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("pos_doc")));
pipeline.add(Aggregates.lookup("BlobFiles", "childrens.docs", "_id", "childrens.docs"));
pipeline.add(Aggregates.unwind("$childrens.docs", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(Sorts.ascending("pos_doc")));
pipeline.add(Aggregates.group("$childrens._id", grouper.getGrouper(
Accumulators.first("childrens_", "$childrens_"),
Accumulators.push("temp_doc", "$childrens.docs")
)));
pipeline.add(Aggregates.graphLookup("Pages", new Json("$arrayElemAt", Arrays.asList("$childrens.parents", 0)), "parents.0", "_id", "childrens.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$childrens.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("childrens.parents.depth", -1)));
pipeline.add(Aggregates.group("$childrens._id", grouper.getGrouper(
Accumulators.first("childrens_", "$childrens_"),
Accumulators.first("childrens_url", "$childrens.url"),
Accumulators.push("urls_", "$childrens.parents.url")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("temp_doc",
new Json("$arrayElemAt", Arrays.asList(
new Json("$filter", new Json("input", "$temp_doc").put("as", "logo").put("cond", new Json("$eq", Arrays.asList(new Json("$substr", Arrays.asList("$$logo.type", 0, "image/".length())), "image/"))))
, 0))
)
.put("childrens_", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$childrens_", null)), new ArrayList<>(), "$childrens_")))
.put("urls_", new Json("$cond", Arrays.asList(new Json("$gt", Arrays.asList(new Json("$size", "$urls_"), 0)), new Json("$slice", Arrays.asList("$urls_", 1, 100)), "$urls_")))
.put("childrens_url", true)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("pos", new Json("$indexOfArray", Arrays.asList("$childrens_", "$childrens._id")))
.put("breadcrumb", true).put("parents", true).put("childrens", true).put("sisters", true).put("links", true).put("urls", true)
.put("childrens", new Json()
.put("_id", "$childrens._id").put("title", "$childrens.title").put("intro", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$childrens.intro", "")), null, "$childrens.intro")))
.put("url", new Json("$concat", Arrays.asList(new Json("$reduce", new Json("input", "$urls_").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))), "/", "$childrens_url")))
.put("logo", new Json("$concat", Arrays.asList(Settings.getCDNHttp() + "/files/", "$temp_doc._id")))
)
));
pipeline.add(Aggregates.sort(Sorts.orderBy(Sorts.ascending("pos"), Sorts.ascending("childrens.title"))));
pipeline.add(Aggregates.group("$initid", grouper.getGrouper(
Accumulators.push("childrens", "$childrens")
)));
//sisters
pipeline.add(Aggregates.lookup("Pages", "parents._id", "parents", "sisters"));
pipeline.add(Aggregates.unwind("$sisters", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("pos")));
pipeline.add(Aggregates.unwind("$sisters.docs", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("pos_doc")));
pipeline.add(Aggregates.lookup("BlobFiles", "sisters.docs", "_id", "sisters.docs"));
pipeline.add(Aggregates.unwind("$sisters.docs", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(Sorts.ascending("pos_doc")));
pipeline.add(Aggregates.group("$sisters._id", grouper.getGrouper(Accumulators.push("temp_doc", "$sisters.docs"))));
pipeline.add(Aggregates.graphLookup("Pages", new Json("$arrayElemAt", Arrays.asList("$sisters.parents", 0)), "parents.0", "_id", "sisters.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$sisters.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("sisters.parents.depth", -1)));
pipeline.add(Aggregates.group("$sisters._id", grouper.getGrouper(
Accumulators.first("pos", "$pos"),
Accumulators.first("sisters_url", "$sisters.url"),
Accumulators.push("urls_", "$sisters.parents.url")
)));
pipeline.add(Aggregates.project(grouper.getProjection().put("pos", true)
.put("temp_doc",
new Json("$arrayElemAt", Arrays.asList(
new Json("$filter", new Json("input", "$temp_doc").put("as", "logo").put("cond", new Json("$eq", Arrays.asList(new Json("$substr", Arrays.asList("$$logo.type", 0, "image/".length())), "image/"))))
, 0))
)
.put("urls_", new Json("$cond", Arrays.asList(new Json("$gt", Arrays.asList(new Json("$size", "$urls_"), 0)), new Json("$slice", Arrays.asList("$urls_", 1, 100)), "$urls_")))
.put("sisters_url", true)));
pipeline.add(Aggregates.project(grouper.getProjection().put("pos", true)
.put("breadcrumb", true).put("parents", true).put("childrens", true).put("sisters", true).put("links", true).put("urls", true)
.put("sisters", new Json().put("_id", "$sisters._id").put("title", "$sisters.title").put("intro", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$sisters.intro", "")), null, "$sisters.intro")))
.put("url", new Json("$concat", Arrays.asList(new Json("$reduce", new Json("input", "$urls_").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))), "/", "$sisters_url")))
.put("logo", new Json("$concat", Arrays.asList(Settings.getCDNHttp() + "/files/", "$temp_doc._id")))
)
));
pipeline.add(Aggregates.sort(Sorts.orderBy(Sorts.descending("pos"), Sorts.ascending("sisters.title"))));
pipeline.add(Aggregates.group("$initid", grouper.getGrouper(
Accumulators.push("sisters", "$sisters")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("sisters", new Json("$filter", new Json("input", "$sisters").put("as", "sisters").put("cond", new Json("$ne", Arrays.asList("$$sisters._id", "$_id")))))
));
//links
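// Internal links: earlier stages split the page text on "Pages(" and keep the first
// DB_KEY_LENGTH characters of each fragment as an id; those ids are now resolved with a
// $lookup, given a full url via their ancestor chain, and sorted by title.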
pipeline.add(Aggregates.lookup("Pages", "links", "_id", "links"));
pipeline.add(Aggregates.unwind("$links", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.graphLookup("Pages", new Json("$arrayElemAt", Arrays.asList("$links.parents", 0)), "parents.0", "_id", "links.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$links.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("links.parents.depth", -1)));
pipeline.add(Aggregates.group("$links._id", grouper.getGrouper(
Accumulators.first("links_url", "$links.url"),
Accumulators.push("urls_", "$links.parents.url")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("urls_", new Json("$cond",
Arrays.asList(
new Json("$gt",
Arrays.asList(new Json("$size", "$urls_"), 0)), new Json("$slice", Arrays.asList("$urls_", 1, 100)), "$urls_")))
.put("links_url", true)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("breadcrumb", true).put("parents", true).put("childrens", true).put("sisters", true).put("links", true).put("urls", true)
.put("links", new Json().put("_id", "$links._id").put("title", "$links.title").put("intro", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$links.intro", "")), null, "$links.intro")))
.put("docs", "$links.docs")
.put("url", new Json("$concat", Arrays.asList(new Json("$reduce", new Json("input", "$urls_").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))), "/", "$links_url"))))
));
pipeline.add(Aggregates.sort(new Json("links.title", 1)));
pipeline.add(Aggregates.group("$initid", grouper.getGrouper(
Accumulators.push("links", "$links")
)));
//breadcrumb
pipeline.add(Aggregates.unwind("$breadcrumb", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("position")));
pipeline.add(Aggregates.unwind("$breadcrumb.docs", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("pos_doc")));
pipeline.add(Aggregates.lookup("BlobFiles", "breadcrumb.docs", "_id", "breadcrumb.docs"));
pipeline.add(Aggregates.unwind("$breadcrumb.docs", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(Sorts.ascending("pos_doc")));
pipeline.add(Aggregates.group("$breadcrumb._id", grouper.getGrouper(Accumulators.push("temp_doc", "$breadcrumb.docs"))));
pipeline.add(Aggregates.graphLookup("Pages", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.parents", 0)), "parents.0", "_id", "breadcrumb.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$breadcrumb.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("breadcrumb.parents.depth", -1)));
pipeline.add(Aggregates.group("$breadcrumb._id", grouper.getGrouper(
Accumulators.first("breadcrumb_url", "$breadcrumb.url"),
Accumulators.push("urls_", "$breadcrumb.parents.url")
)));
pipeline.add(Aggregates.sort(new Json("position", 1)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("temp_doc",
new Json("$arrayElemAt", Arrays.asList(
new Json("$filter", new Json("input", "$temp_doc").put("as", "logo").put("cond", new Json("$eq", Arrays.asList(new Json("$substr", Arrays.asList("$$logo.type", 0, "image/".length())), "image/"))))
, 0))
)
.put("urls_", new Json("$cond", Arrays.asList(new Json("$gt", Arrays.asList(new Json("$size", "$urls_"), 0)), new Json("$slice", Arrays.asList("$urls_", 1, 100)), "$urls_")))
.put("breadcrumb_url", true)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("breadcrumb", true).put("parents", true).put("childrens", true).put("sisters", true).put("links", true).put("urls", true)
.put("breadcrumb", new Json().put("_id", "$breadcrumb._id").put("title", "$breadcrumb.title").put("intro", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$breadcrumb.intro", "")), null, "$breadcrumb.intro")))
.put("url", new Json("$concat", Arrays.asList(new Json("$reduce", new Json("input", "$urls_").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))), "/", "$breadcrumb_url")))
.put("logo", new Json("$concat", Arrays.asList(Settings.getCDNHttp() + "/files/", "$temp_doc._id")))
)
));
pipeline.add(Aggregates.group("$initid", grouper.getGrouper(
Accumulators.first("forums_", "$forums"),
Accumulators.push("breadcrumb", "$breadcrumb")
)));
///<Forums>
pipeline.add(Aggregates.lookup("Forums", "forums", "_id", "forums"));
pipeline.add(Aggregates.unwind("$forums", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.graphLookup("Forums", "$forums._id", "parents.0", "_id", "forums.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.graphLookup("Forums", "$forums._id", "_id", "parents", "forums.branche", new GraphLookupOptions().maxDepth(5000)));
pipeline.add(Aggregates.unwind("$forums.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(Sorts.descending("forums.parents.depth")));
pipeline.add(Aggregates.group("$forums._id", grouper.getGrouper(
Accumulators.first("forums_", "$forums_"),
Accumulators.push("urlsforum", new Json("$arrayElemAt", Arrays.asList("$forums.parents.url", 0)))
)));
//BRANCHE
pipeline.add(Aggregates.project(grouper.getProjection()
.put("forums", new Json()
.put("id", "$forums._id")
.put("title", "$forums.title")
.put("url", new Json("$reduce", new Json("input", "$urlsforum").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))))
.put("branche",
new Json("$concatArrays", Arrays.asList(
Arrays.asList(new Json("_id", "$forums._id")),
new Json("$filter", new Json("input", "$forums.branche").put("as", "branche").put("cond", new Json("$ne", Arrays.asList("$$branche._id", new BsonUndefined()))))
))
)
)
.put("pos", new Json("$indexOfArray", Arrays.asList("$forums_", "$forums._id")))
));
pipeline.add(Aggregates.unwind("$forums.branche", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.project(grouper.getProjection().put("pos", true)
.put("forums", new Json()
.put("id", true)
.put("title", true)
.put("url", true)
.put("branche",
new Json("$concat", Arrays.asList("Forums(", "$forums.branche._id", ")"))
)
)
));
pipeline.add(Aggregates.group("$forums.id", grouper.getGrouper(
Accumulators.first("pos", "$pos"),
Accumulators.push("forums_branche", "$forums.branche")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("pos", true)
.put("forums", new Json()
.put("id", true)
.put("title", true)
.put("url", true)
.put("branche", new Json("$filter", new Json("input", "$forums_branche").put("as", "branche").put("cond", new Json("$ne", Arrays.asList("$$branche", null)))))
)
));
///
pipeline.add(Aggregates.sort(Sorts.ascending("pos")));
pipeline.add(Aggregates.group("$initid", grouper.getGrouper(Accumulators.push("forums", "$forums"))));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("forums",
new Json("$filter", new Json("input", "$forums").put("as", "forums").put("cond", new Json("$ne", Arrays.asList("$$forums.id", new BsonUndefined()))))
)
));
//</Forums>
//<users>
pipeline.add(Aggregates.unwind("$users", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("users_pos")));
pipeline.add(Aggregates.lookup("Users", "users", "_id", "users"));
pipeline.add(Aggregates.sort(Sorts.ascending("users_pos")));
pipeline.add(Aggregates.group("$_id", grouper.getGrouper(Accumulators.push("users", new Json("$arrayElemAt", Arrays.asList("$users", 0))))));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("users", new Json("_id", true).put("name", true).put("avatar", true)
)
));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("docs", new Json("$filter", new Json("input", "$docs").put("as", "docs").put("cond", new Json("$ne", Arrays.asList("$$docs._id", new BsonUndefined())))))
.put("users",
new Json("$filter", new Json("input", "$users").put("as", "users").put("cond", new Json("$ne", Arrays.asList("$$users._id", new BsonUndefined()))))
)
));
//</users>
pipeline.add(Aggregates.project(grouper.getProjection().put("_id", false).remove("initid").remove("urls").put("id", "$initid")
.put("text", new Json("$cond", Arrays.asList(new Json("$or", Arrays.asList(new Json("$eq", Arrays.asList("$text", "")), new Json("$eq", Arrays.asList("$text", "\n")))), null, "$text")))
.put("intro", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$intro", "")), null, "$intro")))
.put("breadcrumb", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList(new Json("$arrayElemAt", Arrays.asList("$breadcrumb.url", 0)), null)), new ArrayList<>(), "$breadcrumb")))
.put("childrens", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList(new Json("$arrayElemAt", Arrays.asList("$childrens.url", 0)), null)), new ArrayList<>(), "$childrens")))
.put("sisters", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList(new Json("$arrayElemAt", Arrays.asList("$sisters.url", 0)), null)), new ArrayList<>(), "$sisters")))
.put("parents", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList(new Json("$arrayElemAt", Arrays.asList("$parents.url", 0)), null)), new ArrayList<>(), "$parents")))
.put("links", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList(new Json("$arrayElemAt", Arrays.asList("$links.url", 0)), null)), new ArrayList<>(), "$links")))
.put("url", new Json("$concat", Arrays.asList(new Json("$reduce", new Json("input", "$urls").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))), "/", "$url")))));
pipeline.add(Aggregates.project(new Json()
.put("id", "$id")
.put("url", "$url")
.put("title", "$title")
.put("logo", "$logo")
.put("users", "$users")
.put("top_title", "$top_title")
.put("date", "$date")
.put("update", "$update")
.put("intro", "$intro")
.put("text", "$text")
.put("docs", "$docs")
.put("breadcrumb", "$breadcrumb")
.put("childrens", "$childrens")
.put("parents", "$parents")
.put("sisters", "$sisters")
.put("links", "$links")
.put("forums", "$forums")
));
Json page = Db.aggregate("Pages", pipeline).first();
if (page == null) {
return null;
}
List<String> branche = new ArrayList<>();
if (page.get("forums") != null) {
for (Json forum : page.getListJson("forums")) {
if (forum.get("branche") != null) {
for (String forum_branche : forum.getList("branche")) {
if (!branche.contains(forum_branche)) {
branche.add(forum_branche);
}
forum.remove("branche");
}
}
}
}
if (branche.size() > 0) {
page.put("posts", PostsAggregator.getPosts(Filters.or(Filters.eq("pages", page.getId()), Filters.in("parents", branche)), 30, null, false));
} else {
page.put("posts", PostsAggregator.getPosts(Filters.eq("pages", page.getId()), 30, null, false));
}
return page;
}
public static Json getPages(Bson filter, int limit, String next_str) {
Paginer paginer = new Paginer(next_str, new Json("update", -1));
Aggregator grouper = new Aggregator("id", "title", "logo", "top_title", "date", "update", "intro", "url", "breadcrumb");
List<Bson> pipeline = new ArrayList<>();
List<Bson> filters = new ArrayList<>();
if (filter != null) {
filters.add(filter);
}
Bson next_filter = paginer.getPaging();
if (next_filter != null) {
filters.add(next_filter);
}
if (filters.size() > 0) {
pipeline.add(Aggregates.match(Filters.and(filters)));
}
pipeline.add(Aggregates.sort(paginer.getFirstSort()));
pipeline.add(Aggregates.limit(limit + 2));
pipeline.add(Aggregates.unwind("$docs", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("pos_doc")));
pipeline.add(Aggregates.lookup("BlobFiles", "docs", "_id", "docs"));
pipeline.add(Aggregates.sort(Sorts.ascending("pos_doc")));
pipeline.add(Aggregates.unwind("$docs", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.group("$_id", grouper.getGrouper(
Accumulators.first("parents", "$parents"),
Accumulators.push("docs", "$docs")
)));
pipeline.add(Aggregates.graphLookup("Pages", new Json("$arrayElemAt", Arrays.asList("$parents", 0)), "parents.0", "_id", "breadcrumb", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$breadcrumb", new UnwindOptions().preserveNullAndEmptyArrays(true)));
/****/
pipeline.add(Aggregates.graphLookup("Pages", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.parents", 0)), "parents.0", "_id", "breadcrumb.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$breadcrumb.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("breadcrumb.parents.depth", -1)));
pipeline.add(Aggregates.group(new Json("bid", "$breadcrumb._id").put("id", "$_id"),
grouper.getGrouper(
Accumulators.first("id", "$_id"),
Accumulators.first("docs", "$docs"),
Accumulators.push("urls_", "$breadcrumb.parents.url")
))
);
pipeline.add(Aggregates.sort(new Json("breadcrumb.depth", -1)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("logo", new Json("$arrayElemAt", Arrays.asList(
new Json("$filter", new Json("input", "$docs").put("as", "logo").put("cond", new Json("$eq", Arrays.asList(new Json("$substr", Arrays.asList("$$logo.type", 0, "image/".length())), "image/"))))
, 0)))
.put("breadcrumb", new Json().put("_id", true).put("title", true).put("url", true)).put("urls_", true)
.put("urls_", new Json("$cond", Arrays.asList(new Json("$gt", Arrays.asList(new Json("$size", "$urls_"), 0)), new Json("$slice", Arrays.asList("$urls_", 1, 100)), "$urls_")))
)
);
pipeline.add(Aggregates.project(grouper.getProjection()
.put("logo", new Json("$concat", Arrays.asList(Settings.getCDNHttp() + "/files/", "$logo._id")))
.put("breadcrumb",
new Json().put("id", "$breadcrumb._id").put("title", "$breadcrumb.title").put("init_url", "$breadcrumb.url").put("url", new Json("$concat", Arrays.asList(new Json("$reduce", new Json("input", "$urls_").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))), "/", "$breadcrumb.url")))
)
)
);
/*****/
pipeline.add(Aggregates.sort(new Json("breadcrumb.depth", -1)));
pipeline.add(Aggregates.group("$id", grouper.getGrouper(
Accumulators.push("breadcrumb", "$breadcrumb"),
Accumulators.push("urls", "$breadcrumb.init_url")
))
);
pipeline.add(Aggregates.project(grouper.getProjection()
.put("breadcrumb", new Json("$slice", Arrays.asList(new Json("$filter", new Json("input", "$breadcrumb").put("as", "breadcrumb").put("cond", new Json("$ne", Arrays.asList("$breadcrumb.id", new BsonUndefined())))), 1, 100)))
.put("urls", new Json("$cond", Arrays.asList(new Json("$gt", Arrays.asList(new Json("$size", "$urls"), 0)), new Json("$slice", Arrays.asList("$urls", 1, 100)), "$urls")))
)
);
pipeline.add(Aggregates.project(grouper.getProjection().remove("id")
.put("top_title", new Json("$cond", Arrays.asList(new Json("$ne", Arrays.asList("$top_title", "")), "$top_title", null)))
.put("intro", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$intro", "")), null, "$intro")))
.put("url", new Json("$concat", Arrays.asList(new Json("$reduce", new Json("input", "$urls").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))), "/", "$url")))
.put("breadcrumb", new Json().put("id", true).put("title", true).put("url", true))
));
pipeline.add(Aggregates.project(grouper.getProjectionOrder()));
pipeline.add(Aggregates.sort(paginer.getLastSort()));
return paginer.getResult(Db.aggregate("Pages", pipeline).into(new ArrayList<>()), limit);
}
public static List<Json> sitemap() {
List<Bson> pipeline = new ArrayList<>();
pipeline.add(Aggregates.sort(Sorts.orderBy(Sorts.descending("update"), Sorts.ascending("_id"))));
pipeline.add(Aggregates.limit(10000));
pipeline.add(Aggregates.graphLookup("Pages", "$_id", "parents.0", "_id", "breadcrumb", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$breadcrumb", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("breadcrumb.depth", -1)));
pipeline.add(Aggregates.group("$_id", Arrays.asList(
Accumulators.first("date", "$date"),
Accumulators.first("update", "$update"),
Accumulators.push("urls", "$breadcrumb.url")
)));
pipeline.add(Aggregates.project(
new Json("_id", false)
.put("lastmod",
new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$update", null)), "$date", "$update"))
)
.put("loc", new Json("$concat", Arrays.asList(Settings.getFullHttp(),
new Json("$reduce", new Json("input", new Json("$cond", Arrays.asList(new Json("$gt", Arrays.asList(new Json("$size", "$urls"), 1)), new Json("$slice", Arrays.asList("$urls", 1, 100)), "$urls"))).put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))))
))
));
pipeline.add(Aggregates.sort(Sorts.orderBy(Sorts.descending("lastmod"))));
return Db.aggregate("Pages", pipeline).into(new ArrayList<>());
}
}
package live.page.base.db;
import com.mongodb.client.model.Filters;
import live.page.base.utils.Json;
import org.bson.conversions.Bson;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class Paginer {
private Json sort_rez = new Json();
private Json first_sort_rez = new Json();
private Json paging_json = new Json();
public Paginer(String paging_str, Json def) {
this(paging_str, null, null, def);
}
public Paginer(String paging_str, String sort_str, List<String> sort_possible, Json def) {
if (paging_str != null && !paging_str.equals("")) {
List<String> dater = new ArrayList<>();
if (sort_possible != null) {
dater.addAll(sort_possible);
}
dater.add(def.keySet().toArray(new String[0])[0]);
paging_json = decrypt(paging_str, dater);
}
if (sort_str != null && sort_str != "") {
if (sort_possible.contains(sort_str.replace("-", ""))) {
first_sort_rez.put(sort_str.replaceAll("^-", ""), (sort_str.startsWith("-")) ? 1 : -1);
sort_rez.put(sort_str.replaceAll("^-", ""), (sort_str.startsWith("-")) ? -1 : 1);
}
}
if (sort_rez.isEmpty()) {
String def_key = def.keySet().toArray(new String[0])[0];
first_sort_rez.put(def_key, 0 - def.getInteger(def_key));
sort_rez.putAll(def);
}
first_sort_rez.put("_id", -1);
sort_rez.put("_id", 1);
}
public Json getFirstSort() {
if (!isPrev()) {
return getLastSort();
} else {
return first_sort_rez;
}
}
public Json getLastSort() {
return sort_rez;
}
public Bson getPaging() {
if (paging_json.isEmpty()) {
return null;
}
List<Bson> filter = new ArrayList<>();
for (String key : sort_rez.keySet()) {
if (!key.equals("_id") && paging_json.containsKey(key)) {
if (!isPrev()) {
if (sort_rez.getInteger(key) < 0) {
filter.add(Filters.or(
Filters.lt(key, paging_json.get(key)),
Filters.and(Filters.eq(key, paging_json.get(key)), Filters.gte("_id", paging_json.getId()))
));
} else {
filter.add(Filters.or(
Filters.gt(key, paging_json.get(key)),
Filters.and(Filters.eq(key, paging_json.get(key)), Filters.gte("_id", paging_json.getId()))
));
}
} else {
if (sort_rez.getInteger(key) < 0) {
filter.add(Filters.or(
Filters.gt(key, paging_json.get(key)),
Filters.and(Filters.eq(key, paging_json.get(key)), Filters.lte("_id", paging_json.getId()))
));
} else {
filter.add(Filters.or(
Filters.lt(key, paging_json.get(key)),
Filters.and(Filters.eq(key, paging_json.get(key)), Filters.lte("_id", paging_json.getId()))
));
}
}
}
}
if (filter.size() == 0) {
return null;
}
return Filters.and(filter);
}
public boolean isPrev() {
return paging_json.getInteger("@", 1) < 0;
}
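// getResult expects the caller to fetch a couple of rows beyond the page size (getPages above
// uses limit + 2): the extra rows only signal whether a next/prev page exists and are trimmed
// from the returned "result" list. The next/prev tokens carry the sort-key values of the
// boundary row plus a direction flag "@", serialised as Json and base-62 encoded below.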
public Json getResult(List<Json> results, int limit) {
Json rez = new Json("result", results.subList((isPrev() && results.size() > limit + 1 ? 1 : 0), Math.min(results.size(), limit + (isPrev() && results.size() > limit + 1 ? 1 : 0))));
Json paging = new Json();
if (results.size() > limit) {
Json last = results.get(Math.min(limit + (isPrev() ? 1 : 0), results.size() - 1));
Json next = new Json("@", 1);
for (String key : sort_rez.keySet()) {
if (key.equals("last")) {
next.put(key, last.getJson("last").getDate("date"));
} else {
next.put(key, last.get(key));
}
}
paging.put("next", encrypt(next));
}
if (results.size() > 0 && !paging_json.isEmpty() && (!isPrev() || (isPrev() && results.size() > limit + 1))) {
Json first = results.get(Math.min(isPrev() ? 1 : 0, results.size() - 1));
Json prev = new Json("@", -1);
for (String key : sort_rez.keySet()) {
if (key.equals("last")) {
prev.put(key, first.getJson("last").getDate("date"));
} else {
prev.put(key, first.get(key));
}
}
paging.put("prev", encrypt(prev));
}
if (limit > 0) {
paging.put("limit", limit);
}
rez.put("paging", paging);
return rez;
}
private String encrypt(Json ele) {
return encodeString(ele.toString(true));
}
private static Json decrypt(String next, List<String> keys) {
if (next.equals("")) {
return null;
}
String decoded = decodeString(next);
if (decoded == null) {
return null;
}
Json dec = new Json(decoded);
if (dec.isEmpty()) {
return null;
}
if (dec.containsKey("id")) {
dec.put("_id", dec.getId());
dec.remove("id");
}
if (keys != null) {
for (String key : Arrays.asList("date", "update", "archived", "join", "last")) {
if (dec.containsKey(key)) {
dec.put(key, dec.parseDate(key));
}
}
}
return dec;
}
private static String encodeString(String str) {
try {
BigInteger base = BigInteger.valueOf(62);
byte[] bytes = str.getBytes("UTF-8");
BigInteger number = new BigInteger(bytes);
if (BigInteger.ZERO.compareTo(number) == 0) {
return "0";
}
BigInteger value = number.add(BigInteger.ZERO);
StringBuilder sb = new StringBuilder();
while (BigInteger.ZERO.compareTo(value) < 0) {
BigInteger[] reminder = value.divideAndRemainder(base);
int remainder = reminder[1].intValue();
if (remainder < 10) {
sb.insert(0, (char) (remainder + '0'));
} else if (remainder < 10 + 26) {
sb.insert(0, (char) (remainder + 'a' - 10));
} else {
sb.insert(0, (char) (remainder + 'A' - 10 - 26));
}
value = reminder[0];
}
return reverseString(sb.toString());
} catch (Exception e) {
return null;
}
}
private static String decodeString(String enc) {
try {
String str = reverseString(enc);
BigInteger base = BigInteger.valueOf(62);
byte[] base62Bytes = str.getBytes("UTF-8");
BigInteger rez = BigInteger.ZERO;
BigInteger multiplier = BigInteger.ONE;
for (int i = base62Bytes.length - 1; i >= 0; i--) {
byte byt = base62Bytes[i];
int alpha = byt - '0';
if (Character.isLowerCase(byt)) {
alpha = byt - ('a' - 10);
} else if (Character.isUpperCase(byt)) {
alpha = byt - ('A' - 10 - 26);
}
rez = rez.add(multiplier.multiply(BigInteger.valueOf(alpha)));
multiplier = multiplier.multiply(base);
}
return new String(rez.toByteArray(), "UTF-8");
} catch (Exception e) {
return null;
}
}
private static String reverseString(String str) {
if (str.length() <= 1) {
return str;
} else {
return str.charAt(str.length() - 1) + reverseString(str.substring(0, str.length() - 1));
}
}
}
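A minimal sketch of the pagination flow, mirroring PagesAggregator.getPages above; the "Items" collection and the business filter are hypothetical, and Json / Db behave as in the rest of the gist:

// Illustrative sketch only -- not part of the gist.
public static Json examplePagination(Bson business_filter, String next_token, int limit) {
    // next_token is the "paging.next" / "paging.prev" value from the previous response, or null
    Paginer paginer = new Paginer(next_token, new Json("date", -1));
    List<Bson> filters = new ArrayList<>();
    if (business_filter != null) {
        filters.add(business_filter);
    }
    Bson paging_filter = paginer.getPaging();
    if (paging_filter != null) {
        filters.add(paging_filter);
    }
    List<Bson> pipeline = new ArrayList<>();
    if (!filters.isEmpty()) {
        pipeline.add(Aggregates.match(Filters.and(filters)));
    }
    pipeline.add(Aggregates.sort(paginer.getFirstSort()));
    pipeline.add(Aggregates.limit(limit + 2)); // extra rows let getResult detect next/prev pages
    pipeline.add(Aggregates.sort(paginer.getLastSort()));
    return paginer.getResult(Db.aggregate("Items", pipeline).into(new ArrayList<>()), limit);
}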
package live.page.base.forum;
import com.mongodb.client.model.Accumulators;
import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.GraphLookupOptions;
import com.mongodb.client.model.Sorts;
import com.mongodb.client.model.UnwindOptions;
import live.page.base.db.Aggregator;
import live.page.base.db.Db;
import live.page.base.db.Paginer;
import live.page.base.session.Users;
import live.page.base.utils.Json;
import live.page.base.utils.Settings;
import org.bson.BsonUndefined;
import org.bson.conversions.Bson;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class PostsAggregator {
public static Json getSimplePost(String _id, Users user, boolean remove) {
List<Bson> pipeline = new ArrayList<>();
Aggregator grouper = new Aggregator("id", "title", "remove", "replies", "date", "last", "user",
"sysid", "posts", "parents", "branche", "pages", "url", "breadcrumb");
Json postproject = new Json("id", true).put("date", true).put("update", true).put("user", true).put("sysid", true).put("title", true)
.put("text", true).put("coins", true).put("docs", true).put("changes", true).put("comments", true).put("remove", true);
pipeline.add(Aggregates.match(Filters.and(Filters.eq("_id", _id))));
pipeline.add(Aggregates.limit(1));
//pages
pipeline.add(Aggregates.project(grouper.getProjection().put("page_order", "$pages")));
pipeline.add(Aggregates.lookup("Pages", "pages", "_id", "pages"));
pipeline.add(Aggregates.unwind("$pages", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.unwind("$pages.docs", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("pos_doc")));
pipeline.add(Aggregates.lookup("BlobFiles", "pages.docs", "_id", "pages.docs"));
pipeline.add(Aggregates.unwind("$pages.docs", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(Sorts.ascending("pos_doc")));
pipeline.add(Aggregates.group("$pages._id", grouper.getGrouper(
Accumulators.first("initid", "$_id"),
Accumulators.first("page_order", "$page_order"),
Accumulators.push("temp_doc", "$pages.docs"))));
pipeline.add(Aggregates.graphLookup("Pages", "$pages._id", "parents.0", "_id", "pages.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$pages.parents", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("depth")));
pipeline.add(Aggregates.sort(new Json("pages.parents.depth", -1)));
pipeline.add(Aggregates.group("$pages._id", grouper.getGrouper(
Accumulators.first("initid", "$initid"),
Accumulators.first("page_order", "$page_order"),
Accumulators.first("temp_doc", "$temp_doc"),
Accumulators.push("urls_pages", "$pages.parents.url")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("initid", true)
.put("page_order", true)
.put("urls_pages", true)
.put("temp_doc",
new Json("$arrayElemAt", Arrays.asList(
new Json("$filter", new Json("input", "$temp_doc").put("as", "temp_doc").put("cond", new Json("$eq", Arrays.asList(new Json("$substr", Arrays.asList("$$temp_doc.type", 0, "image/".length())), "image/"))))
, 0))
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("initid", true)
.put("pages", new Json("id", "$pages._id")
.put("order", new Json("$indexOfArray", Arrays.asList("$page_order", "$pages._id")))
.put("title", "$pages.title")
.put("intro", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$pages.intro", "")), null, "$pages.intro")))
.put("logo", new Json("$concat", Arrays.asList(Settings.getCDNHttp() + "/files/", "$temp_doc._id")))
.put("url", new Json("$reduce", new Json("input", "$urls_pages").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this"))))))
));
pipeline.add(Aggregates.sort(Sorts.ascending("pages.order")));
pipeline.add(Aggregates.group("$initid", grouper.getGrouper(
Accumulators.push("pages", "$pages")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("pages",
new Json("$filter", new Json("input", "$pages").put("as", "pages").put("cond", new Json("$ne", Arrays.asList("$$pages.id", new BsonUndefined()))))
)
));
//pages
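// Replies: every post whose "thread" field points at this _id is joined, its documents are
// resolved against BlobFiles, its comments are unwound with their index so they can be pushed
// back in order, and user avatars are rewritten as CDN urls (falling back to /ui/logo).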
pipeline.add(Aggregates.lookup("Posts", "_id", "thread", "posts"));
pipeline.add(Aggregates.unwind("$posts", new UnwindOptions()));
pipeline.add(Aggregates.unwind("$posts.docs", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("pos_doc")));
pipeline.add(Aggregates.lookup("BlobFiles", "posts.docs", "_id", "posts.docs"));
pipeline.add(Aggregates.unwind("$posts.docs", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(Sorts.ascending("pos_doc")));
pipeline.add(Aggregates.group(new Json("posts_id", "$posts._id").put("id", "$_id"), grouper.getGrouper(Accumulators.push("posts_docs", "$posts.docs"))));
pipeline.add(Aggregates.project(grouper.getProjection().put("posts_docs", true).put("_id", "$_id.id")));
pipeline.add(Aggregates.unwind("$posts.comments", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("idc")));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("user", new Json("id", "$posts.user.id").put("name", "$posts.user.name").put("count", "$posts.user.count").put("avatar",
new Json("$concat", Arrays.asList(Settings.getCDNHttp(),
new Json("$cond",
Arrays.asList(new Json("$eq", Arrays.asList("$posts.user.avatar", new BsonUndefined())),
"/ui/logo",
new Json("$concat", Arrays.asList("/files/", "$posts.user.avatar"))))
))
))
.put("posts",
postproject.clone()
.put("user", new Json("id", "$posts.user.id").put("name", "$posts.user.name").put("count", "$posts.user.count").put("avatar",
new Json("$concat", Arrays.asList(Settings.getCDNHttp(),
new Json("$cond",
Arrays.asList(new Json("$eq", Arrays.asList("$posts.user.avatar", new BsonUndefined())),
"/ui/logo",
new Json("$concat", Arrays.asList("/files/", "$posts.user.avatar"))))
))
))
.put("coins",
new Json("$cond", Arrays.asList(new Json("$ne", Arrays.asList("$posts.coins", new BsonUndefined())), new Json("$size", "$posts.coins"), 0))
)
.put("docs", "$posts_docs")
.put("update", new Json("$arrayElemAt", Arrays.asList("$posts.changes.date", 0)))
.put("changes", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$posts.changes", new BsonUndefined())), 0, new Json("$size", "$posts.changes"))))
.put("id", "$posts._id")
.put("comments",
new Json()
.put("user", new Json("id", "$posts.user.id").put("name", "$posts.comments.user.name").put("count", "$posts.comments.user.count").put("avatar",
new Json("$concat", Arrays.asList(Settings.getCDNHttp(),
new Json("$cond",
Arrays.asList(new Json("$eq", Arrays.asList("$posts.comments.user.avatar", new BsonUndefined())),
"/ui/logo",
new Json("$concat", Arrays.asList("/files/", "$posts.comments.user.avatar"))))
))
))
.put("text", true).put("date", true).put("index", "$idc").put("remove", true)
.put("changes", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$posts.comments.changes", new BsonUndefined())), 0, new Json("$size", "$posts.comments.changes"))))
))
));
pipeline.add(Aggregates.sort(new Json("post.comments.index", 1)));
pipeline.add(Aggregates.group(new Json("posts_id", "$posts.id").put("_id", "$_id"), grouper.getGrouper(Accumulators.push("posts_comments", "$posts.comments"))));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("posts", postproject.clone()
.put("docs", new Json("_id", true).put("type", true).put("size", true).put("text", true))
.put("comments", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList(new Json("$arrayElemAt", Arrays.asList("$posts_comments.date", 0)), new BsonUndefined())), new ArrayList<>(), "$posts_comments"))))
));
if (!remove) {
pipeline.add(Aggregates.match(Filters.exists("posts.remove", false)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("posts", postproject.clone()
.put("comments",
new Json("$filter", new Json("input", "$posts.comments").put("as", "posts_comments").put("cond", new Json("$eq", Arrays.asList("$$posts_comments.remove", new BsonUndefined()))))
))
));
}
pipeline.add(Aggregates.sort(new Json("posts.date", 1)));
pipeline.add(Aggregates.group("$_id._id", grouper.getGrouper(Accumulators.push("posts", "$posts"))));
//</deleted items>
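// Parents are stored as "Forums(<id>)" reference strings: the double $split below extracts the
// raw forum id so the following $graphLookup can rebuild the breadcrumb chain.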
pipeline.add(Aggregates.project(grouper.getProjection()
.put("parent", new Json("$arrayElemAt", Arrays.asList(
new Json("$split", Arrays.asList(
new Json("$arrayElemAt", Arrays.asList(
new Json("$split", Arrays.asList(new Json("$arrayElemAt", Arrays.asList("$parents", 0)), "Forums("))
, 1)), ")")), 0
))
)
));
pipeline.add(Aggregates.graphLookup("Forums", "$parent", "parents.0", "_id", "breadcrumb", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$breadcrumb", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("breadcrumb.depth", -1)));
pipeline.add(Aggregates.group("$_id", grouper.getGrouper(
Accumulators.first("url", new Json("$arrayElemAt", Arrays.asList("$url", 0))),
Accumulators.push("breadcrumb", "$breadcrumb"),
Accumulators.push("urls", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.url", 0))))));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("breadcrumb", new Json().put("_id", true).put("title", true).put("url", true).put("intro", true).put("parents", true))
.put("url", new Json("$concat", Arrays.asList(new Json("$reduce", new Json("input", "$urls").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))),
new Json("$cond", Arrays.asList(new Json("$lt", Arrays.asList("$replies", 1)), "/noreply/", "/")), "$_id"))
)
.put("parents",
new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList("$parents", null)), new ArrayList<>(), "$parents"))
)
));
/////
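// For each breadcrumb entry, walk its own ancestors and rebuild its full URL by
// concatenating the ancestor slugs with $reduce.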
pipeline.add(Aggregates.unwind("$breadcrumb", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("position")));
pipeline.add(Aggregates.graphLookup("Forums", "$breadcrumb._id", "parents.0", "_id", "breadcrumb.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$breadcrumb.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("breadcrumb.parents.depth", -1)));
pipeline.add(Aggregates.group(new Json("breadcrumb__id", "$breadcrumb._id").put("_id", "$_id"), grouper.getGrouper(
Accumulators.first("position", "$position"),
Accumulators.first("breadcrumb_url", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.url", 0))),
Accumulators.push("urls_", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.parents.url", 0))))));
pipeline.add(Aggregates.sort(new Json("position", 1)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("breadcrumb", new Json().put("title", true).put("_id", true)
.put("url", new Json("$reduce", new Json("input", "$urls_").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this"))))))
));
pipeline.add(Aggregates.group("$_id._id", grouper.getGrouper(Accumulators.push("breadcrumb", "$breadcrumb"))));
/////
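// "date" is the date of the thread's first post, "last" the date of its most recent one
// (posts were sorted by ascending date before being regrouped).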
pipeline.add(Aggregates.project(grouper.getProjection()
.put("date", new Json("$arrayElemAt", Arrays.asList("$posts.date", 0)))
.put("last", new Json("$arrayElemAt", Arrays.asList("$posts.date", new Json("$subtract", Arrays.asList(new Json("$size", "$posts"), 1)))))
));
pipeline.add(Aggregates.project(
new Json("_id", false)
.put("id", "$_id")
.put("remove", "$remove")
.put("title", "$title")
.put("user", "$user")
.put("sysid", "$sysid")
.put("date", "$date")
.put("last", "$last")
.put("url", "$url")
.put("replies", "$replies")
.put("posts", "$posts")
.put("parents", "$parents")
.put("pages", "$pages")
.put("breadcrumb", "$breadcrumb")
));
return Db.aggregate("Threads", pipeline).first();
}
public static Json getPost(String _id, Users user, boolean remove) {
Json result = getSimplePost(_id, user, remove);
if (result == null) {
return null;
}
result.put("breadcrumb", result.getListJson("breadcrumb"));
Json forum = new Json().put("parents", new ArrayList<>()).put("childrens", new ArrayList<>()).put("sisters", new ArrayList<>()).put("branche", new ArrayList<>()).put("pages", new ArrayList<>());
List<Json> post_parents = new ArrayList<>();
List<String> forums = new ArrayList<>();
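// Merge the navigation data (breadcrumb, parents, childrens, sisters, branche, pages) of every
// parent forum into a single synthetic "forum" context for this thread.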
for (String parent : result.getList("parents")) {
parent = parent.replaceAll("Forums\\(([^)]+)\\)", "$1");
forums.add(parent);
Json forumitem = ForumsAggregator.getForum(Filters.eq("_id", parent), user);
if (forumitem == null) {
// parent forum not found or not accessible: skip it to avoid a NullPointerException below
continue;
}
post_parents.add(new Json("id", forumitem.getId()).put("title", forumitem.getString("title")).put("url", forumitem.getString("url")));
if (!forum.containsKey("breadcrumb")) {
forum.put("breadcrumb", forumitem.getListJson("breadcrumb"));
}
List<Json> parents_forum = forum.getListJson("parents");
for (Json parents : forumitem.getListJson("parents")) {
if (!parents_forum.contains(parents)) {
parents_forum.add(parents);
}
}
forum.put("parents", parents_forum);
List<Json> childrens_forum = forum.getListJson("childrens");
for (Json childrens : forumitem.getListJson("childrens")) {
if (!childrens_forum.contains(childrens)) {
childrens_forum.add(childrens);
}
}
forum.put("childrens", childrens_forum);
List<Json> sisters_forum = forum.getListJson("sisters");
for (Json sisters : forumitem.getListJson("sisters")) {
if (!sisters_forum.contains(sisters)) {
sisters_forum.add(sisters);
}
}
forum.put("sisters", sisters_forum);
List<String> branche_forum = forum.getList("branche");
if (forumitem.getList("branche") != null) {
for (String branche : forumitem.getList("branche")) {
if (!branche_forum.contains(branche)) {
branche_forum.add(branche);
}
}
forum.put("branche", branche_forum);
}
List<Json> pages_forum = forum.getListJson("pages");
for (Json pages : forumitem.getListJson("pages")) {
if (!pages_forum.contains(pages)) {
pages_forum.add(pages);
}
}
forum.put("pages", pages_forum);
}
result.put("parents", post_parents);
List<Bson> filters = new ArrayList<>();
filters.add(Filters.ne("_id", result.getString("id")));
result.put("forum", forum);
if (forum.getList("branche").size() > 0) {
filters.add(Filters.in("parents", forum.getList("branche")));
filters.add(Filters.gt("replies", 0));
Json branche = PostsAggregator.getPosts(Filters.and(filters), 40, null, false);
result.put("branche", branche);
}
forum.remove("branche");
return result;
}
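// A minimal usage sketch, not part of the original gist; the thread id and the Users instance
// are placeholders, and the listed keys simply reflect what getSimplePost/getPost put on the result.
/*
Json thread = getPost("xxxx", user, false);
if (thread != null) {
    // thread carries "posts", "parents", "breadcrumb", "forum" and, when the forum branch
    // holds other threads, a paginated "branche" list.
}
*/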
public static Json getPosts(Bson filter, int limit) {
return getPosts(filter, limit, null, false);
}
public static Json getPosts(Bson filter, int limit, String next_str, boolean remove) {
Paginer paginer = new Paginer(next_str, new Json("last", -1));
List<Bson> pipeline = new ArrayList<>();
List<Bson> filters = new ArrayList<>();
Aggregator grouper = new Aggregator("id", "remove", "title", "date", "last", "last_date", "user", "replies", "url", "breadcrumb");
if (filter == null) {
filters.add(Filters.regex("parents", "^Forums\\("));
} else {
filters.add(filter);
}
if (!remove) {
filters.add(Filters.eq("remove", null));
}
Bson next = paginer.getPaging();
if (next != null) {
filters.add(next);
}
if (filters.size() > 0) {
pipeline.add(Aggregates.match(Filters.and(filters)));
}
pipeline.add(Aggregates.sort(paginer.getFirstSort()));
pipeline.add(Aggregates.limit(limit + 2));
pipeline.add(Aggregates.project(grouper.getProjection().put("parents", true).put("last_date", "$last")));
List<Bson> express = new ArrayList<>();
express.add(new Json("$eq", Arrays.asList("$thread", "$$id")));
if (!remove) {
express.add(new Json("$eq", Arrays.asList("$remove", new BsonUndefined())));
}
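// The correlated $lookup below is written as a raw Json document (let/pipeline form): for each
// thread it fetches the single most recent post, skipping removed posts unless "remove" is true,
// and stores it in "last".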
pipeline.add(new Json("$lookup",
new Json()
.put("from", "Posts")
.put("let", new Json("id", "$_id"))
.put("pipeline", Arrays.asList(
Aggregates.match(new Json("$expr", Filters.and(express))),
Aggregates.sort(new Json("date", -1)),
Aggregates.limit(1)
))
.put("as", "last")
));
pipeline.add(Aggregates.project(grouper.getProjection().put("parents", true).put("last", new Json("$arrayElemAt", Arrays.asList("$last", 0)))));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("parent", new Json("$arrayElemAt", Arrays.asList(
new Json("$split", Arrays.asList(
new Json("$arrayElemAt", Arrays.asList(
new Json("$split", Arrays.asList(new Json("$arrayElemAt", Arrays.asList("$parents", 0)), "Forums("))
, 1)), ")")), 0
))
)
.put("last", new Json()
.put("date", true)
.put("_id", true)
.put("user", new Json("id", "$last.user.id").put("name", "$last.user.name").put("count", "$last.user.count").put("avatar",
new Json("$concat", Arrays.asList(Settings.getCDNHttp(),
new Json("$cond",
Arrays.asList(new Json("$eq", Arrays.asList("$last.user.avatar", new BsonUndefined())),
"/ui/logo",
new Json("$concat", Arrays.asList("/files/", "$last.user.avatar"))))
))
))
)
));
pipeline.add(Aggregates.graphLookup("Forums", "$parent", "parents.0", "_id", "breadcrumb", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.project(grouper.getProjection().put("urls", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.url", 0)))));
pipeline.add(Aggregates.unwind("$breadcrumb", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("position")));
pipeline.add(Aggregates.graphLookup("Forums", "$breadcrumb._id",
"parents.0", "_id", "breadcrumb.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$breadcrumb.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("breadcrumb.parents.depth", -1)));
pipeline.add(Aggregates.group(new Json("breadcrumb_id", "$breadcrumb._id").put("_id", "$_id"),
grouper.getGrouper(Accumulators.push("breadcrumb_urls", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.parents.url", 0))))
));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("urls", true)
.put("breadcrumb",
new Json().put("id", "$breadcrumb._id").put("title", "$breadcrumb.title")
.put("depth", true)
.put("urlinit", "$breadcrumb.url")
.put("url", new Json("$reduce", new Json("input", "$breadcrumb_urls")
.put("initialValue", "").put("in", new Json("$concat", Arrays.asList(
"$$value", "/", "$$this"))))
)
)
));
pipeline.add(Aggregates.sort(new Json("breadcrumb.depth", -1)));
pipeline.add(Aggregates.group("$_id._id", grouper.getGrouper(
Accumulators.push("urls", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.urlinit", 0))),
Accumulators.push("breadcrumb", "$breadcrumb")
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("url", new Json("$concat", Arrays.asList(
new Json("$reduce", new Json("input", "$urls").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))),
new Json("$cond", Arrays.asList(new Json("$lte", Arrays.asList("$replies", 0)), "/noreply/", "/")), "$_id"
)))
.put("user", new Json("id", "$user.id").put("name", "$user.name").put("count", "$user.count").put("avatar",
new Json("$concat", Arrays.asList(Settings.getCDNHttp(),
new Json("$cond",
Arrays.asList(new Json("$eq", Arrays.asList("$user.avatar", new BsonUndefined())),
"/ui/logo",
new Json("$concat", Arrays.asList("/files/", "$user.avatar"))))
))
))
.put("breadcrumb", new Json("$cond", Arrays.asList(new Json("$eq", Arrays.asList(new Json("$arrayElemAt", Arrays.asList("$breadcrumb.title", 0)), new BsonUndefined())), null, "$breadcrumb")))
));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("breadcrumb", new Json("title", true).put("id", true).put("url", true))
));
pipeline.add(Aggregates.sort(Sorts.descending("last_date")));
pipeline.add(Aggregates.project(grouper.getProjectionOrder().remove("last_date")));
return paginer.getResult(Db.aggregate("Threads", pipeline).into(new ArrayList<>()), limit);
}
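// A minimal usage sketch, not part of the original gist: list the latest threads of a forum
// branch, 40 per page. The filter, page size and "next_str" cursor are placeholders, and the
// exact shape of the paginated result depends on Paginer.getResult().
/*
Json page = getPosts(Filters.in("parents", forum.getList("branche")), 40, next_str, false);
*/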
public static List<Json> sitemap() {
List<Bson> pipeline = new ArrayList<>();
Aggregator grouper = new Aggregator("id", "update", "replies", "parents", "url", "breadcrumb");
pipeline.add(Aggregates.match(Filters.and(Filters.ne("replies", 0), Filters.exists("remove", false))));
pipeline.add(Aggregates.sort(Sorts.orderBy(Sorts.descending("update"), Sorts.ascending("_id"))));
pipeline.add(Aggregates.limit(10000));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("parent", new Json("$arrayElemAt", Arrays.asList(
new Json("$split", Arrays.asList(
new Json("$arrayElemAt", Arrays.asList(
new Json("$split", Arrays.asList(new Json("$arrayElemAt", Arrays.asList("$parents", 0)), "Forums("))
, 1)), ")")), 0
))
)
));
pipeline.add(Aggregates.graphLookup("Forums", "$parent", "parents.0", "_id", "breadcrumb", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.project(grouper.getProjection().put("urls", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.url", 0)))));
pipeline.add(Aggregates.unwind("$breadcrumb", new UnwindOptions().preserveNullAndEmptyArrays(true).includeArrayIndex("position")));
pipeline.add(Aggregates.graphLookup("Forums", "$breadcrumb._id",
"parents.0", "_id", "breadcrumb.parents", new GraphLookupOptions().depthField("depth").maxDepth(50)));
pipeline.add(Aggregates.unwind("$breadcrumb.parents", new UnwindOptions().preserveNullAndEmptyArrays(true)));
pipeline.add(Aggregates.sort(new Json("breadcrumb.parents.depth", -1)));
pipeline.add(Aggregates.group(new Json("breadcrumb_id", "$breadcrumb._id").put("_id", "$_id"),
grouper.getGrouper(Accumulators.push("breadcrumb_urls", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.parents.url", 0))))
));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("urls", true)
.put("breadcrumb",
new Json()
.put("depth", true)
.put("urlinit", "$breadcrumb.url")
.put("url", new Json("$reduce", new Json("input", "$breadcrumb_urls")
.put("initialValue", "").put("in", new Json("$concat", Arrays.asList(
"$$value", "/", "$$this"))))
)
)
));
pipeline.add(Aggregates.sort(new Json("breadcrumb.depth", -1)));
pipeline.add(Aggregates.group("$_id._id", grouper.getGrouper(
Accumulators.push("urls", new Json("$arrayElemAt", Arrays.asList("$breadcrumb.urlinit", 0)))
)));
pipeline.add(Aggregates.project(grouper.getProjection()
.put("url", new Json("$concat", Arrays.asList(
Settings.getFullHttp(),
new Json("$reduce", new Json("input", "$urls").put("initialValue", "").put("in", new Json("$concat", Arrays.asList("$$value", "/", "$$this")))),
new Json("$cond", Arrays.asList(new Json("$lte", Arrays.asList("$replies", 0)), "/noreply/", "/")), "$_id"
)))
));
pipeline.add(Aggregates.project(
new Json("_id", false).put("lastmod", "$update").put("loc", "$url")
));
pipeline.add(Aggregates.sort(Sorts.orderBy(Sorts.descending("lastmod"))));
return Db.aggregate("Threads", pipeline).into(new ArrayList<>());
}
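// Each sitemap entry only keeps "loc" (absolute thread URL) and "lastmod" (last update date),
// ready to be serialized into <url> elements. A hedged consumption sketch:
/*
for (Json entry : sitemap()) {
    // entry.get("loc"), entry.get("lastmod")
}
*/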
}