+/**
+ * This is the main calculation class for annotation. Based on what the user searches for, it builds a personal
+ * knowledge graph (PKG) for every user in the same group. The system gathers the important entities each user
+ * has looked into, both to show the interests of group members and to create recommendations for others.
+ *
+ * Basic structure of the Pkg class:
+ * <ul>
+ * <li>A graph represented by a list of nodes and edges</li>
+ * <li>A list of recognised entities retrieved from the database</li>
+ * </ul>
+ * The workflow is conceptually described as follows:
+ * 1 - createPkg() initializes the Pkg, fetches all recognised entities from the DB and adds them as nodes.
+ *
+ * 2 - The queries the user searches for and the links they click are fed from
+ * SearchBean.commandOnResourceClick() and SearchBean.destroy() to the dbpedia-spotlight API in class NERParser.
+ *
+ * 3 - Class NERParser annotates the input content together with its source (group description, user profile or
+ * search), returns a list of recognised entities and stores it in the DB (learnweb_annotations.annotationCount).
+ * The input itself is stored in another DB table.
+ *
+ * 4 - The Pkg is then updated by receiving those entities as nodes, via updatePkg().
+ *
+ * 5 - Shared objects are the product of the Pkg system. Before creating them, the Pkg merges all duplicate
+ * nodes and edges, then scores every node based on its connections.
+ *
+ * 6 - Depending on the user's purpose, the Pkg exports the shared objects for that purpose, e.g. the top 3
+ * entities with the highest weights for the collaborative graph, or the top 5 for the recommender system.
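+ *
+ * A minimal usage sketch (illustrative only; {@code groupId} stands in for a real group id):
+ * <pre>{@code
+ * PKGraph pkg = PKGraph.createPkg(user);
+ * JsonSharedObject collab = pkg.createSharedObject(user, groupId, 3, false, "collabGraph");
+ * JsonSharedObject single = pkg.createSingleGraph();
+ * }</pre>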
+ */
+public final class PKGraph {
+ private static final Logger log = LogManager.getLogger(PKGraph.class);
+ private static final Pattern PATTERN = Pattern.compile("http://dbpedia.org/resource/");
+
+ private int userId;
+ protected RdfModel rdfGraph;
+    private List<Node> nodes = new ArrayList<>();
+    private List<Link> links = new ArrayList<>();
+    private transient List<RecognisedEntity> recognisedEntities;
+    private transient HashMap<Integer, Double> results;
+
+ private PKGraph() {
+ }
+
+ private void setLink(int source, int target, double weight) {
+ links.add(new Link(source, target, weight));
+ }
+
+ /**
+ * Add this node into the list of nodes.
+ *
+ * @param id the new node's id from DB
+ * @param uri the new node's uri
+ * @param userId the user's id.
+ * @param confidence the new node's confidence
+ * @param sessionId the new node's session id
+ * @param weight the weight of the new node (can actually be excluded in future updates)
+ * @param type the type of the new node.
+ * @param date the created time of the node.
+ */
+ private void addNode(int id, String uri, int userId, double confidence, double weight, String sessionId, String type, LocalDateTime date) {
+ //Get the Node name as uri minus domain root - dbpedia.org/resource
+ String nameQuery = PATTERN.matcher(uri).replaceAll("").replaceAll("_", " ");
+
+ Node node = new Node(id, nameQuery, uri, userId, weight, confidence, sessionId, type, date);
+ if (!nodes.contains(node)) {
+ nodes.add(node);
+ }
+ }
+
+ /**
+ * Add one RDF statement to the user's RDF graph.
+ *
+ * @param subject the statement's subject
+ * @param pre the statement's predicate
+ * @param object the statement's object
+ * @param mode either "literal" or "resource"
+ */
+ public void addRdfStatement(String subject, String pre, String object, String mode) {
+ rdfGraph.addStatement(subject, pre, object, mode);
+ }
+
+ private void addSearchSessionStatement(User user) {
+ for (SearchSession session : dao().getSearchHistoryDao().findSessionsByUserId(user.getId())) {
+ addRdfStatement("SearchSession/" + session.getSessionId(), "schema:startTime", session.getStartTimestamp().format(DateTimeFormatter.ISO_DATE), "literal");
+ addRdfStatement("SearchSession/" + session.getSessionId(), "schema:endTime", session.getEndTimestamp().format(DateTimeFormatter.ISO_DATE), "literal");
+ for (SearchQuery query : session.getQueries()) {
+ addRdfStatement("SearchSession/" + session.getSessionId(), RdfModel.prefixBase + "contains", "SearchQuery/" + query.searchId(), "resource");
+ addRdfStatement("SearchQuery/" + query.searchId(), RdfModel.prefixBase + "query", query.query(), "literal");
+ addRdfStatement("SearchQuery/" + query.searchId(), "schema:dateCreated", query.timestamp().format(DateTimeFormatter.ISO_DATE), "literal");
+ }
+ }
+ }
+
+ /**
+     * Calculate the weight contribution of an entity, based on its type and its age (exponential decay).
+     *
+     * @param date the date of the entity's creation
+     * @param type the type of this entity
+     * @return the weight of this entity, based on its type and how many days since it was stored in the DB
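+     * For example, a "query" entity created 2 days ago yields 11 * e^(-2) ≈ 1.49.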
+ */
+ private double calculateWeight(LocalDateTime date, String type) {
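+        // DAYS.between(now, date) is negative for past dates, so Math.exp(days) decays towards 0 as entities age.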
+ int days = (int) ChronoUnit.DAYS.between(LocalDateTime.now(), date);
+        switch (type) {
+            case "user" -> {
+                return 3 * Math.exp(days);
+            }
+            case "group", "web" -> {
+                return Math.exp(days);
+            }
+            case "snippet_clicked" -> {
+                return 4 * Math.exp(days);
+            }
+            case "query" -> {
+                return 11 * Math.exp(days);
+            }
+            case "snippet_not_clicked" -> {
+                return -0.6 * Math.exp(days);
+            }
+            default -> {
+            }
+        }
+ return 0;
+ }
+
+ /**
+     * Merge duplicate nodes (same uri) and their corresponding links.
+     * Two nodes with the same uri are considered duplicates, so the function removes one of
+     * the two in each pair. The remaining node gets its usernames, types and session ids
+     * combined from both.
+     *
+     * The function then creates the links between these nodes. Based on which types two nodes have in common,
+     * the link between them gets its weight from the formula in calculateWeight(date, type).
+     * Nodes that have no connections are connected to the DEFAULT node.
+ */
+ public void removeDuplicatingNodesAndLinks() {
+ //Remove duplicating nodes by merging nodes with the same uri
+ for (int i = 0; i < nodes.size() - 1; i++) {
+ if (!nodes.get(i).getUri().isEmpty()) {
+ for (int j = i + 1; j < nodes.size(); j++) {
+ if (nodes.get(i).getUri().equals(nodes.get(j).getUri())) {
+ //Join the users and sessionId of the first node
+                        List<String> types = new ArrayList<>(Arrays.stream(nodes.get(i).getType().split(",")).toList());
+                        types.removeAll(Arrays.stream(nodes.get(j).getType().split(",")).toList());
+                        types.addAll(Arrays.stream(nodes.get(j).getType().split(",")).toList());
+
+ nodes.get(i).combineUsers(nodes.get(j).getSessionId());
+ nodes.get(i).setType(String.join(",", types));
+ //Remove the duplicating node
+ nodes.remove(j);
+ j--;
+ nodes.get(i).increaseFrequency();
+ }
+ }
+ }
+ }
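+        // Link every pair of nodes that share at least one type; index 0 is reserved for the DEFAULT node.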
+ for (int i = 1; i < nodes.size() - 1; i++) {
+ boolean isUnique = true;
+ for (int j = i + 1; j < nodes.size(); j++) {
+                List<String> commonTypes = Arrays.stream(nodes.get(i).getType().split(",")).filter(
+                    Arrays.stream(nodes.get(j).getType().split(",")).toList()::contains).toList();
+ if (!commonTypes.isEmpty()) {
+ double weight = 0;
+ isUnique = false;
+ for (String s : commonTypes) {
+ weight += calculateWeight(nodes.get(i).getDate(), s);
+ }
+ setLink(i, j, weight);
+ }
+ }
+ if (isUnique) {
+ setLink(0, i, calculateWeight(nodes.get(i).getDate(), nodes.get(i).getType()));
+ }
+ }
+
+ // recalculate the weight of each node
+ calculateSumWeight();
+ }
+
+ /**
+     * Initializes the PKG for the given user.
+ *
+ * @param user the current User
+ */
+ public static PKGraph createPkg(User user) {
+ PKGraph pkg = new PKGraph();
+ pkg.userId = user.getId();
+ // Get the entities from DB for this user
+ pkg.recognisedEntities = dao().getCollabGraphDao().findEntityByUser(user.getId());
+
+ if (pkg.recognisedEntities.size() == 1) {
+            // Add the default node. A graph that has only one node will be connected to the default node
+ pkg.addNode(0, "default", 0, 1, 0.0, "", "", null);
+ }
+
+ // New RDF-Model initialization
+ pkg.rdfGraph = new RdfModel();
+ // Initialize rdf graph model
+        Optional<RdfObject> rdfObject = dao().getCollabGraphDao().findRdfById(user.getId());
+ if (rdfObject.isPresent()) {
+ pkg.rdfGraph.makeModelFromString(rdfObject.get().getRdfValue());
+ } else {
+ //If the user belongs to a group
+ for (Group group : user.getGroups()) {
+ pkg.rdfGraph.addGroup(user, group);
+ }
+ }
+
+        // Add statements in which search sessions are the subject
+ pkg.addSearchSessionStatement(user);
+ for (RecognisedEntity recognisedEntity : pkg.recognisedEntities) {
+ // Call the DB update here
+ pkg.updatePkg(recognisedEntity);
+ }
+
+ pkg.removeDuplicatingNodesAndLinks();
+ log.info("PKG created for user {}", user.getUsername());
+ return pkg;
+ }
+
+ /**
+ * Add RDF-statements to this user's RDF graph based on the parameters from the entity.
+     * Called after DBpedia-spotlight has been used.
+ *
+ * @param recognisedEntity the entity
+ * @param user the current user
+ * @param session the user's current search session
+ */
+ public void updateRdfModel(RecognisedEntity recognisedEntity, User user, String session) {
+ //----------------------------------Rdf-insert-model--------------------------------------
+        Pattern keywordPattern = Pattern.compile("<keywords>(.*?)</keywords>");
+        Pattern headlinePattern = Pattern.compile("<headline>(.*?)</headline>");
+        //Get the session id list from entity
+        // List<Integer> searchIds = dao().getCollabGraphDao().findSearchIdByResult(annotationCount.getUriId());
+
+ if (recognisedEntity.getType().contains("snippet")) {
+ addRdfStatement("SearchSession/" + session, "contains", "Snippet/" + recognisedEntity.getUriId(), "resource");
+ } else if ("web".equals(recognisedEntity.getType())) {
+ addRdfStatement("SearchSession/" + session, "contains", "schema:WebPage/" + recognisedEntity.getUriId(), "resource");
+ }
+ addRdfStatement("educor:User/" + user.getId(), "educor:generatesLogs", "SearchSession/" + session, "resource");
+
+ //Input stream statements
+ if (recognisedEntity.getInputStreams() != null) {
+            List<InputStreamRdf> inputStreamRdfs = dao().getCollabGraphDao().findInputContentById(recognisedEntity.getInputStreams());
+ for (InputStreamRdf inputRdf : inputStreamRdfs) {
+ //Add createsInputStream statement based on the entities' type
+ if (inputRdf.getUserId() == user.getId()) {
+ if ("user".equals(recognisedEntity.getType())) {
+ addRdfStatement("educor:UserProfile/" + user.getId(), "createsInputStream", "InputStream/" + inputRdf.getId(), "resource");
+ } else if ("group".equals(recognisedEntity.getType())) {
+ addRdfStatement("foaf:Group/" + inputRdf.getObjectId(), "createsInputStream", "InputStream/" + inputRdf.getId(), "resource");
+ } else {
+                        addRdfStatement("SearchSession/" + session, "createsInputStream", "InputStream/" + inputRdf.getId(), "resource");
+ }
+
+ addRdfStatement("InputStream/" + inputRdf.getId(), "schema:text", inputRdf.getContent(), "literal");
+ addRdfStatement("InputStream/" + inputRdf.getId(), "schema:dateCreated", inputRdf.getDateCreated().toString(), "literal");
+ addRdfStatement("RecognizedEntities/" + PATTERN.matcher(recognisedEntity.getUri()).replaceAll(""), "processes", "InputStream/" + inputRdf.getId(), "resource");
+ if ("web".equals(recognisedEntity.getType())) {
+ Matcher matcher = keywordPattern.matcher(inputRdf.getContent());
+ while (matcher.find()) {
+ addRdfStatement("WebPage/" + recognisedEntity.getUriId(), "keywords", matcher.group(1), "literal");
+ }
+ matcher = headlinePattern.matcher(inputRdf.getContent());
+ while (matcher.find()) {
+ addRdfStatement("WebPage/" + recognisedEntity.getUriId(), "headline", matcher.group(1), "literal");
+ }
+ }
+ }
+ }
+ }
+ //--------------------------------RDF-Insert-Model-End----------------------------------
+ }
+
+ /**
+ * Add this recognized entity into the node list as a Node.
+ *
+ * @param entity The recognized entity to be added to the PKG
+ */
+ public void updatePkg(RecognisedEntity entity) {
+ addNode(entity.getUriId(), entity.getUri(), entity.getUserId(), entity.getConfidence(),
+ Precision.round(calculateWeight(entity.getCreatedAt(), entity.getType()), 3), entity.getSessionId(),
+ entity.getType(), entity.getCreatedAt());
+ }
+
+ /**
+     * Calculate the sum_weight of each node with the formula of NEA (Negativity Exponential Algorithm).
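+     * For node i: score(i) = confidence(i) * (sum of confidences of linked nodes) * (sum of weights of incident links).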
+ */
+ public void calculateSumWeight() {
+ results = new HashMap<>();
+ for (int i = 1; i < nodes.size() - 1; i++) {
+ double weight = 0;
+ double sumConfidence = 0;
+            for (Link link : links) {
+                if (link.source == i || link.target == i) {
+                    weight += link.weight;
+                    int other = link.source == i ? link.target : link.source;
+                    sumConfidence += other == 0 ? 0 : nodes.get(other).getConfidence();
+                }
+ }
+ results.put(i, nodes.get(i).getConfidence() * sumConfidence * weight);
+ }
+ }
+
+ /**
+ * Create a shared object for the single graph of the current user, which contains nodes from 3 different sources (user, group and session).
+     * Each source is capped at 10 nodes.
+ *
+ * @return the shared object of a single graph
+ */
+ public JsonSharedObject createSingleGraph() {
+ if (!dao().getUserDao().isActiveUser(userId)) {
+ return null;
+ }
+
+ JsonSharedObject object = new JsonSharedObject("singleGraph", false);
+        List<Node> newNodes = new ArrayList<>();
+
+        Map<String, String> typeMap = new HashMap<>();
+ //HARDCODED lines - need alternatives
+ typeMap.put("user", "user");
+ typeMap.put("group", "group");
+ typeMap.put("snippet_not_clicked", "session");
+ typeMap.put("snippet_clicked", "session");
+ typeMap.put("query", "session");
+ typeMap.put("web", "session");
+
+        Map<String, Integer> occurrences = new HashMap<>();
+ occurrences.put("user", 0);
+ occurrences.put("group", 0);
+ occurrences.put("session", 0);
+
+        List<Map.Entry<Integer, Double>> entries = new ArrayList<>(results.entrySet());
+ entries.sort((o1, o2) -> o2.getValue().compareTo(o1.getValue()));
+        //Count the occurrences of each source; once a source reaches 10 nodes, no more are added for it
+        for (Map.Entry<Integer, Double> entry : entries) {
+ Node node = nodes.get(entry.getKey());
+            for (Map.Entry<String, String> type : typeMap.entrySet()) {
+ if (occurrences.get(type.getValue()) < 10 && node.getType().contains(type.getKey())) {
+ if (object.getEntities().stream().noneMatch(s -> s.getType().equals(type.getValue()) && s.getUri().equals(node.getUri()))) {
+ newNodes.add(node);
+ object.getEntities().add(new JsonSharedObject.Entity(node.getUri(), node.getName(), entry.getValue(), type.getValue(), node.getId()));
+ occurrences.put(type.getValue(), occurrences.get(type.getValue()) + 1);
+ }
+ }
+ }
+ }
+ //Create links
+ for (int i = 0; i < newNodes.size() - 1; i++) {
+ for (int j = i + 1; j < newNodes.size(); j++) {
+ Node node1 = newNodes.get(i);
+ Node node2 = newNodes.get(j);
+ if (Arrays.stream(node1.getType().split(",")).anyMatch(node2.getType()::contains)) {
+ object.getLinks().add(new JsonSharedObject.Link(i, j));
+ }
+ }
+ }
+ //Add this new graph (newLinks, newNodes) into a shared object, so that it can be stored later on
+ return object;
+ }
+
+ /**
+ * Create a shared object based on the result of pkg graph calculation.
+ *
+ * @param numberPositive how many positive entities the shared Object will pass
+ * @param numberNegative how many negative entities the shared Object will pass
+     * @return the shared object in JSON form, or null if no weights have been calculated
+ */
+ public JsonSharedObject prepareCollabRec(int numberPositive, int numberNegative) {
+ calculateSumWeight();
+
+ JsonSharedObject sharedObject = new JsonSharedObject("CollabRec", false);
+ if (results == null || results.isEmpty()) {
+ log.info("No weight calculated");
+ return null;
+ }
+
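+        // Select the numberPositive highest-scored and the numberNegative lowest-scored entities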
+        List<Map.Entry<Integer, Double>> chosenEntries = new ArrayList<>();
+        if (results.size() < numberPositive + numberNegative) {
+            chosenEntries.addAll(results.entrySet());
+        } else {
+            Comparator<Map.Entry<Integer, Double>> cmp = Map.Entry.comparingByValue();
+            List<Map.Entry<Integer, Double>> entries = new ArrayList<>(results.entrySet());
+            entries.sort(cmp.reversed());
+
+            int limit = 0;
+            for (Map.Entry<Integer, Double> entry : entries) {
+                chosenEntries.add(entry);
+                if (++limit >= numberPositive) {
+                    break;
+                }
+            }
+
+            entries.sort(cmp);
+            limit = 0;
+            for (Map.Entry<Integer, Double> entry : entries) {
+                chosenEntries.add(entry);
+                if (++limit >= numberNegative) {
+                    break;
+                }
+            }
+        }
+
+        for (Map.Entry<Integer, Double> entry : chosenEntries) {
+            Node chosenNode = nodes.get(entry.getKey());
+            sharedObject.getEntities().add(new JsonSharedObject.Entity(chosenNode.getUri(), chosenNode.getName(), entry.getValue(), chosenNode.getType(), chosenNode.getId()));
+        }
+
+ return sharedObject;
+ }
+
+ /**
+ * Create a shared object based on the result of pkg graph calculation.
+ *
+ * @param user the current user
+     * @param groupId the group id
+     * @param numberEntities how many entities per user the shared object will show
+     * @param isAscending if true, take the results from the bottom of the ranking; otherwise from the top
+     * @param application the purpose of this shared object (e.g. "recommendation", "collabGraph", or a "negative"/"positive" variant)
+     * @return the shared object in JSON form, or null if there are no results or the user is inactive
+ */
+ public JsonSharedObject createSharedObject(User user, int groupId, int numberEntities, boolean isAscending, String application) {
+ //Initialization
+        //The shared object to be returned
+ JsonSharedObject sharedObject = new JsonSharedObject(application, false);
+        List<Node> newNodes;
+ if (results == null) {
+ return null;
+ }
+
+ //Sort the calculated results to get entities' ranking
+        List<Map.Entry<Integer, Double>> entries = new ArrayList<>(results.entrySet());
+        Comparator<Map.Entry<Integer, Double>> cmp = Map.Entry.comparingByValue();
+ if (isAscending) {
+ entries.sort(cmp);
+ } else {
+ entries.sort(cmp.reversed());
+ }
+
+ int index = 0;
+ //Choose only the active users to create the shared object
+ if (!dao().getUserDao().isActiveUser(user.getId())) {
+ return null;
+ } else {
+ newNodes = new ArrayList<>();
+            for (Map.Entry<Integer, Double> entry : entries) {
+                //Take the top numberEntities entities from the results, break after reaching the number
+ Node chosenNode = nodes.get(entry.getKey());
+ newNodes.add(chosenNode);
+ sharedObject.getEntities().add(new JsonSharedObject.Entity(chosenNode.getUri(), chosenNode.getName(), entry.getValue(), chosenNode.getType(), chosenNode.getId()));
+ index++;
+ if (index >= numberEntities) {
+ break;
+ }
+ }
+
+ //Links initialization
+ for (int i = 0; i < newNodes.size() - 1; i++) {
+ for (int j = i + 1; j < newNodes.size(); j++) {
+                    Set<String> result = Arrays.stream(newNodes.get(i).getSessionId().split(","))
+                        .distinct()
+                        .filter(Arrays.stream(newNodes.get(j).getSessionId().split(",")).toList()::contains)
+                        .collect(Collectors.toSet());
+ if (!result.isEmpty()) {
+ sharedObject.getLinks().add(new JsonSharedObject.Link(i, j));
+ }
+ }
+ }
+
+ sharedObject.setUser(new JsonSharedObject.User(user.getId(), user.getUsername()));
+ }
+
+ Gson gson = new GsonBuilder().registerTypeAdapter(LocalDateTime.class, new SearchHistoryBean.LocalDateTimeAdapter().nullSafe()).create();
+
+ //Export shared object to DB
+ int sharedObjectId;
+        List<JsonSharedObject> obj = dao().getCollabGraphDao().findObjectsByUserId(groupId, sharedObject.getUser().getId(), application);
+ if (obj.isEmpty()) {
+ sharedObjectId = dao().getCollabGraphDao().insertSharedObject(sharedObject.getUser().getId(), groupId, application, gson.toJson(sharedObject));
+ } else {
+ dao().getCollabGraphDao().updateSharedObject(gson.toJson(sharedObject), LocalDateTime.now(), sharedObject.getUser().getId(), groupId, application);
+ sharedObjectId = obj.get(0).getId();
+ }
+
+ //--------------------RDF---------------------------
+ rdfGraph.addStatement("SharedObject/" + sharedObjectId, "schema:dateCreated", LocalDateTime.now().format(DateTimeFormatter.ISO_DATE), "literal");
+ rdfGraph.addStatement("SharedObject/" + sharedObjectId, "schema:application", sharedObject.getApplication(), "literal");
+ rdfGraph.addStatement("SharedObject/" + sharedObjectId, "schema:text", gson.toJson(sharedObject), "literal");
+ for (JsonSharedObject.Entity entity : sharedObject.getEntities()) {
+ rdfGraph.addStatement("SharedObject/" + sharedObjectId, "dependsOn", "RecognizedEntities/" + entity.getId(), "resource");
+
+            Optional<RecognisedEntity> annotationObj = recognisedEntities.stream().filter(s -> s.getUriId() == entity.getId()).findFirst();
+ if (annotationObj.isPresent()) {
+ for (String inputId : annotationObj.get().getInputStreams().split(",")) {
+ rdfGraph.addStatement("SharedObject/" + sharedObjectId, "dependsOn", "InputStream/" + inputId, "resource");
+ }
+ }
+ }
+
+ //--------------------End RDF ----------------------
+
+ //Add the entities after calculation to Rdf List
+ Group group = dao().getGroupDao().findByIdOrElseThrow(groupId);
+ for (Node node : nodes) {
+ rdfGraph.addEntity(PATTERN.matcher(node.getUri()).replaceAll(""), node.getUri(), node.getName(), node.getWeight(), node.getConfidence(), node.getDate());
+ }
+
+        //Serialize the RDF graph and persist it to the DB
+ String value = rdfGraph.printModel();
+ if (dao().getCollabGraphDao().findRdfById(user.getId()).isEmpty()) {
+ dao().getCollabGraphDao().insertRdf(user.getId(), group.getId(), value);
+ } else {
+ dao().getCollabGraphDao().updateRdf(value, user.getId());
+ }
+ return sharedObject;
+ }
+
+ public RdfModel getRdfGraph() {
+ return rdfGraph;
+ }
+
+ /**
+     * The Node class. Holds all values of an entity.
+     */
+    public static class Node {
+ private transient int id;
+ private String uri;
+ private String name;
+ private int frequency;
+ private int userId;
+ private transient String sessionId;
+ private transient double confidence;
+ private transient double weight;
+ private transient LocalDateTime date;
+ private String type;
+
+ //Node class. Receives the input from DB to be visualized
+ public Node(int id, String name, String uri, int userId, double weight, double confidence, String sessionId, String type, LocalDateTime date) {
+ this.id = id;
+ this.sessionId = sessionId;
+ this.name = name;
+ this.uri = uri;
+ this.userId = userId;
+ this.confidence = confidence;
+ this.frequency = 1;
+ this.type = type;
+ this.date = date;
+ this.weight = weight;
+ }
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(final int id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(final String name) {
+ this.name = name;
+ }
+
+ public int getFrequency() {
+ return frequency;
+ }
+
+ public void setFrequency(int frequency) {
+ this.frequency = frequency;
+ }
+
+ public int getUserId() {
+ return userId;
+ }
+
+ public void setUserId(final int userId) {
+ this.userId = userId;
+ }
+
+ public String getUri() {
+ return uri;
+ }
+
+ public void setUri(final String uri) {
+ this.uri = uri;
+ }
+
+ public void increaseFrequency() {
+ this.frequency++;
+ }
+
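+        /**
+         * Merge the comma-separated session-id list of a duplicate node into this node's list.
+         */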
+ public void combineUsers(String sessionId) {
+            List<String> sessionIdSplit = Arrays.stream(sessionId.split(",")).toList();
+ for (String id : sessionIdSplit) {
+ if (!this.sessionId.contains(id)) {
+ if (this.sessionId.isEmpty()) {
+ this.sessionId = id;
+ } else {
+ this.sessionId += "," + id;
+ }
+ }
+ }
+ }
+
+ public double getConfidence() {
+ return confidence;
+ }
+
+ public void setConfidence(final double confidence) {
+ this.confidence = confidence;
+ }
+
+ public String getSessionId() {
+ return sessionId;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(final String type) {
+ this.type = type;
+ }
+
+ public LocalDateTime getDate() {
+ return date;
+ }
+
+ public void setDate(final LocalDateTime date) {
+ this.date = date;
+ }
+
+ public double getWeight() {
+ return weight;
+ }
+
+ public void setWeight(final double weight) {
+ this.weight = weight;
+ }
+ }
+
+ /**
+ * The link class. Represents the weighted link between two entities
+ */
+ public static class Link {
+ private int source;
+ private int target;
+ private transient double weight;
+
+ public int getSource() {
+ return source;
+ }
+
+ public void setSource(final int source) {
+ this.source = source;
+ }
+
+ public int getTarget() {
+ return target;
+ }
+
+ public void setTarget(final int target) {
+ this.target = target;
+ }
+
+ public double getWeight() {
+ return weight;
+ }
+
+ public void setWeight(final double weight) {
+ this.weight = weight;
+ }
+
+ public Link(int source, int target, double weight) {
+ this.source = source;
+ this.target = target;
+ this.weight = weight;
+ }
+ }
+}
diff --git a/src/main/java/de/l3s/learnweb/pkg/PKGraphDao.java b/src/main/java/de/l3s/learnweb/pkg/PKGraphDao.java
new file mode 100644
index 000000000..6c9201e86
--- /dev/null
+++ b/src/main/java/de/l3s/learnweb/pkg/PKGraphDao.java
@@ -0,0 +1,158 @@
+package de.l3s.learnweb.pkg;
+
+import java.io.Serializable;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.time.LocalDateTime;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Optional;
+
+import org.jdbi.v3.core.mapper.RowMapper;
+import org.jdbi.v3.core.statement.StatementContext;
+import org.jdbi.v3.sqlobject.SqlObject;
+import org.jdbi.v3.sqlobject.config.RegisterRowMapper;
+import org.jdbi.v3.sqlobject.customizer.Define;
+import org.jdbi.v3.sqlobject.statement.GetGeneratedKeys;
+import org.jdbi.v3.sqlobject.statement.SqlQuery;
+import org.jdbi.v3.sqlobject.statement.SqlUpdate;
+
+import de.l3s.dbpedia.RecognisedEntity;
+import de.l3s.learnweb.user.User;
+import de.l3s.util.SqlHelper;
+
+public interface PKGraphDao extends SqlObject, Serializable {
+
+ @RegisterRowMapper(RdfObjectMapper.class)
+ @SqlQuery("SELECT * FROM learnweb_large.sl_rdf WHERE user_id = ?")
+    Optional<RdfObject> findRdfById(int userId);
+
+ @SqlUpdate("INSERT INTO learnweb_large.sl_rdf (user_id, group_id, rdf_value) VALUES (?, ?, ?)")
+ @GetGeneratedKeys("id")
+ int insertRdf(int userId, int groupId, String rdfValue);
+
+ @SqlUpdate("UPDATE learnweb_large.sl_rdf SET rdf_value = ? WHERE user_id = ?")
+ void updateRdf(String rdfValue, int userId);
+
+
+ @RegisterRowMapper(RecognisedEntityMapper.class)
+ @SqlQuery("SELECT * FROM learnweb_large.sl_recognised_entity WHERE uri = ? AND type = ? AND user_id = ?")
+    Optional<RecognisedEntity> findEntityByUriAndType(String uri, String type, int userId);
+
+ @RegisterRowMapper(RecognisedEntityMapper.class)
+ @SqlQuery("SELECT * FROM learnweb_large.sl_recognised_entity WHERE user_id = ? ORDER BY created_at")
+    List<RecognisedEntity> findEntityByUser(int userId);
+
+ default void saveEntity(RecognisedEntity entity) {
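+        // Upsert: stores the entity (filling created_at if missing) and back-fills the generated id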
+ if (entity.getCreatedAt() == null) {
+ entity.setCreatedAt(SqlHelper.now());
+ }
+
+        LinkedHashMap<String, Object> params = new LinkedHashMap<>();
+ params.put("entity_uri", SqlHelper.toNullable(entity.getUriId()));
+ params.put("type", entity.getType());
+ params.put("uri", entity.getUri());
+ params.put("input_id", entity.getInputStreams());
+ params.put("surface_form", entity.getSurfaceForm());
+ params.put("session_id", entity.getSessionId());
+ params.put("user_id", entity.getUserId());
+ params.put("confidence", entity.getConfidence());
+ params.put("created_at", entity.getCreatedAt());
+
+        Optional<Integer> entityId = SqlHelper.handleSave(getHandle(), "learnweb_large.sl_recognised_entity", params)
+ .executeAndReturnGeneratedKeys().mapTo(Integer.class).findOne();
+
+ if (entityId.isPresent() && entityId.get() != 0) {
+ entity.setUriId(entityId.get());
+ }
+ }
+
+
+ @RegisterRowMapper(JsonSharedObjectMapper.class)
+ @SqlQuery("SELECT id, shared_object FROM learnweb_large.sl_shared_object WHERE group_id = ? AND user_id = ? AND application = ?")
+    List<JsonSharedObject> findObjectsByUserId(int groupId, int userId, String application);
+
+ @RegisterRowMapper(JsonSharedObjectMapper.class)
+ @SqlQuery("SELECT id, shared_object FROM learnweb_large.sl_shared_object WHERE user_id = ? AND application = ?")
+    List<JsonSharedObject> findObjectsByUserId(int userId, String application);
+
+ @RegisterRowMapper(JsonSharedObjectMapper.class)
+ @SqlQuery("SELECT id, shared_object FROM learnweb_large.sl_shared_object WHERE group_id = ? AND application = ?")
+    List<JsonSharedObject> findObjectsByGroupId(int groupId, String application);
+
+ @SqlUpdate("UPDATE learnweb_large.sl_shared_object SET shared_object = ?, created_at = ? WHERE user_id = ? AND group_id = ? AND application = ?")
+ void updateSharedObject(String sharedObject, LocalDateTime createdAt, int userId, int groupId, String application);
+
+ @SqlUpdate("INSERT INTO learnweb_large.sl_shared_object (user_id, group_id, application, shared_object, created_at) VALUES(?, ?, ?, ?, CURRENT_TIMESTAMP())")
+ @GetGeneratedKeys("id")
+ int insertSharedObject(int userId, int groupId, String application, String sharedObject);
+
+
+ @SqlUpdate("INSERT INTO learnweb_large.sl_suggested_query (user_id, reference_query, query, source, `index`, options, graph) VALUES (?, ?, ?, ?, ?, ?, ?)")
+ void insertSuggestedQuery(User user, String referenceQuery, String query, String source, int index, String options, String graph);
+
+
+ @SqlUpdate("INSERT INTO learnweb_large.sl_search_entity SET search_id = ?, entity_uri = ?")
+ int insertQueryResult(int searchId, int uriId);
+
+ @SqlQuery("SELECT search_id FROM learnweb_large.sl_search_entity WHERE entity_uri = ?")
+    List<Integer> findSearchIdByResult(int uriId);
+
+
+ @SqlUpdate("INSERT INTO learnweb_large.sl_input_stream (user_id, type, object_id, content, date_created) VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP())")
+ @GetGeneratedKeys("id")
+ int insertInputStream(int userId, String type, int objectId, String content);
+
+ @RegisterRowMapper(InputStreamRdfMapper.class)
+    @SqlQuery("SELECT * FROM learnweb_large.sl_input_stream WHERE id IN (<inputIds>)")
+    List<InputStreamRdf> findInputContentById(@Define("inputIds") String inputIds);
+
+
+    class RecognisedEntityMapper implements RowMapper<RecognisedEntity> {
+ @Override
+ public RecognisedEntity map(final ResultSet rs, final StatementContext ctx) throws SQLException {
+ RecognisedEntity annotation = new RecognisedEntity();
+ annotation.setUriId(rs.getInt("entity_uri"));
+ annotation.setUri(rs.getString("uri"));
+ annotation.setConfidence(rs.getDouble("confidence"));
+ annotation.setSurfaceForm(rs.getString("surface_form"));
+ annotation.setType(rs.getString("type"));
+ annotation.setSessionId(rs.getString("session_id"));
+ annotation.setUserId(rs.getInt("user_id"));
+ annotation.setInputStreams(rs.getString("input_id"));
+ annotation.setCreatedAt(SqlHelper.getLocalDateTime(rs.getTimestamp("created_at")));
+ return annotation;
+ }
+ }
+
+    class JsonSharedObjectMapper implements RowMapper<JsonSharedObject> {
+ @Override
+ public JsonSharedObject map(final ResultSet rs, final StatementContext ctx) throws SQLException {
+ JsonSharedObject sharedObject = new JsonSharedObject(rs.getString("shared_object"), true);
+ sharedObject.setId(rs.getInt("id"));
+ return sharedObject;
+ }
+ }
+
+    class RdfObjectMapper implements RowMapper<RdfObject> {
+ @Override
+ public RdfObject map(final ResultSet rs, final StatementContext ctx) throws SQLException {
+ RdfObject obj = new RdfObject(rs.getInt("user_id"), rs.getString("rdf_value"));
+ return obj;
+ }
+ }
+
+    class InputStreamRdfMapper implements RowMapper<InputStreamRdf> {
+ @Override
+ public InputStreamRdf map(final ResultSet rs, final StatementContext ctx) throws SQLException {
+ InputStreamRdf obj = new InputStreamRdf(
+ rs.getInt("id"),
+ rs.getInt("user_id"),
+ rs.getString("type"),
+ rs.getInt("object_id"),
+ rs.getString("content"),
+ rs.getDate("date_created"));
+ return obj;
+ }
+ }
+}
diff --git a/src/main/java/de/l3s/learnweb/pkg/RdfModel.java b/src/main/java/de/l3s/learnweb/pkg/RdfModel.java
new file mode 100644
index 000000000..c5be35fb1
--- /dev/null
+++ b/src/main/java/de/l3s/learnweb/pkg/RdfModel.java
@@ -0,0 +1,120 @@
+package de.l3s.learnweb.pkg;
+
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.regex.Pattern;
+
+import org.apache.jena.rdf.model.Literal;
+import org.apache.jena.rdf.model.Model;
+import org.apache.jena.rdf.model.ModelFactory;
+import org.apache.jena.rdf.model.Property;
+import org.apache.jena.rdf.model.RDFNode;
+import org.apache.jena.rdf.model.Resource;
+import org.apache.jena.rdf.model.Statement;
+import org.apache.jena.rdf.model.StmtIterator;
+
+import de.l3s.learnweb.group.Group;
+import de.l3s.learnweb.user.User;
+
+public class RdfModel {
+ public static final String prefixBase = "https://github.com/tibonto/PKGonto/";
+ private static final String prefixSchema = "https://schema.org/";
+ private static final String prefixEducor = "https://github.com/tibonto/educor#";
+ private static final String prefixFoaf = "http://xmlns.com/foaf/spec/";
+ private static final Pattern COMPILE = Pattern.compile(" ");
+ private static final Pattern PATTERN = Pattern.compile("\\<[^>]*>");
+
+ private Model model;
+
+ public RdfModel() {
+ model = ModelFactory.createDefaultModel();
+ model.setNsPrefix("schema", prefixSchema);
+ model.setNsPrefix("educor", prefixEducor);
+ model.setNsPrefix("foaf", prefixFoaf);
+
+ model.createResource("SharedObject/Negativity_Exponential_Algorithm");
+ }
+
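+    /**
+     * Add one statement (subject, predicate, object) to the model. Spaces in identifiers are replaced
+     * with underscores. For "literal" objects, existing values of the same subject/predicate are
+     * removed first, so at most one literal value remains.
+     */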
+ public void addStatement(String sbj, String pre, String obj, String type) {
+ Resource subject = model.getResource((sbj.contains(":")) ? COMPILE.matcher(sbj).replaceAll("_") : prefixBase + COMPILE.matcher(sbj).replaceAll("_"));
+ Property predicate = model.createProperty(pre);
+
+ switch (type) {
+ case "literal" -> {
+ if (obj == null) {
+ obj = "";
+ }
+ Literal object = model.createLiteral(obj);
+                List<Statement> statementsToRemove = new ArrayList<>();
+ StmtIterator existingStatements = model.listStatements(subject, predicate, (RDFNode) null);
+ while (existingStatements.hasNext()) {
+ Statement statement = existingStatements.nextStatement();
+ statementsToRemove.add(statement);
+ }
+ for (Statement statement : statementsToRemove) {
+ model.remove(statement);
+ }
+                // Jena's getResource() never returns null (it creates the resource if absent), so add directly
+                subject.addLiteral(predicate, object);
+ }
+ case "resource" -> {
+                // getResource() creates the resource if it does not already exist
+                RDFNode object = model.getResource(COMPILE.matcher(obj).replaceAll("_"));
+ subject.addProperty(predicate, object);
+ }
+ default -> {
+ }
+ }
+ }
+
+ public void addGroup(final User user, final Group group) {
+ addStatement("educor:User/" + user.getId(), "schema:email", user.getEmail(), "literal");
+ addStatement("educor:User/" + user.getId(), "educor:hasProfile", "educor:UserProfile/" + user.getId(), "resource");
+ addStatement("educor:UserProfile/" + user.getId(), prefixBase + "username", user.getUsername(), "literal");
+ addStatement("educor:User/" + user.getId(), "schema:name", user.getUsername(), "literal");
+ addStatement("foaf:Group/" + group.getId(), "schema:description", PATTERN.matcher(group.getDescription()).replaceAll(""), "literal");
+ addStatement("foaf:Group/" + group.getId(), "schema:name", group.getTitle(), "literal");
+ addStatement("foaf:Group/" + group.getId(), "schema:dateCreated", group.getCreatedAt().format(DateTimeFormatter.ISO_DATE), "literal");
+ addStatement("educor:User/" + user.getId(), "schema:memberOf", "foaf:Group/" + group.getId(), "resource");
+ }
+
+ public void addEntity(String name, String uri, String surfaceForm, double weight, double score, LocalDateTime time) {
+ if (Objects.equals(uri, "default")) {
+ return;
+ }
+
+ addStatement("RecognizedEntities/" + name, "schema:identifier", uri, "resource");
+ addStatement("RecognizedEntities/" + name, prefixBase + "surfaceForm", surfaceForm, "literal");
+ addStatement("RecognizedEntities/" + name, prefixBase + "confidenceScore", String.valueOf(score), "literal");
+ addStatement("RecognizedEntities/" + name, "schema:dateCreated", time.format(DateTimeFormatter.ISO_DATE), "literal");
+ addStatement("RecognizedEntities/" + name, prefixBase + "weight", String.valueOf(weight), "literal");
+ }
+
+ public String printModel() {
+        // Serialize the model to Turtle (TTL)
+        StringWriter out = new StringWriter();
+        model.write(out, "TTL", prefixBase);
+ return out.toString();
+ }
+
+ public Model getModel() {
+ return model;
+ }
+
+ public void makeModelFromString(final String inputStream) {
+ StringReader stringReader = new StringReader(inputStream);
+ model.read(stringReader, prefixBase, "TTL");
+ }
+}
diff --git a/src/main/java/de/l3s/learnweb/pkg/RdfObject.java b/src/main/java/de/l3s/learnweb/pkg/RdfObject.java
new file mode 100644
index 000000000..44fcc51e3
--- /dev/null
+++ b/src/main/java/de/l3s/learnweb/pkg/RdfObject.java
@@ -0,0 +1,29 @@
+package de.l3s.learnweb.pkg;
+
+
+public class RdfObject {
+
+ private int userId;
+ private String rdfValue;
+
+ public RdfObject(final int userId, final String rdfValue) {
+ this.userId = userId;
+ this.rdfValue = rdfValue;
+ }
+
+ public int getUserId() {
+ return userId;
+ }
+
+ public void setUserId(final int userId) {
+ this.userId = userId;
+ }
+
+ public String getRdfValue() {
+ return rdfValue;
+ }
+
+ public void setRdfValue(final String rdfValue) {
+ this.rdfValue = rdfValue;
+ }
+}
diff --git a/src/main/java/de/l3s/learnweb/resource/search/SearchBean.java b/src/main/java/de/l3s/learnweb/resource/search/SearchBean.java
index f81553425..67ebf1d48 100644
--- a/src/main/java/de/l3s/learnweb/resource/search/SearchBean.java
+++ b/src/main/java/de/l3s/learnweb/resource/search/SearchBean.java
@@ -4,23 +4,45 @@
import java.io.ObjectInputStream;
import java.io.Serial;
import java.io.Serializable;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URLEncoder;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
+import java.util.stream.Collectors;
import jakarta.annotation.PostConstruct;
+import jakarta.annotation.PreDestroy;
import jakarta.faces.application.FacesMessage;
-import jakarta.faces.view.ViewScoped;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Inject;
import jakarta.inject.Named;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import org.omnifaces.cdi.ViewScoped;
import org.omnifaces.util.Beans;
import org.omnifaces.util.Faces;
import org.omnifaces.util.Servlets;
import org.primefaces.PrimeFaces;
+import org.primefaces.event.SelectEvent;
+import org.primefaces.model.DialogFrameworkOptions;
+
+import com.google.gson.Gson;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
import de.l3s.interweb.client.Interweb;
import de.l3s.learnweb.beans.ApplicationBean;
@@ -39,6 +61,11 @@
import de.l3s.learnweb.resource.search.filters.FilterType;
import de.l3s.learnweb.resource.search.solrClient.FileInspector.FileInfo;
import de.l3s.learnweb.resource.web.WebResource;
+import de.l3s.learnweb.pkg.PKGraphDao;
+import de.l3s.learnweb.pkg.JsonSharedObject;
+import de.l3s.learnweb.pkg.PKGraph;
+import de.l3s.dbpedia.RecognisedEntity;
+import de.l3s.dbpedia.DbpediaSpotlightService;
import de.l3s.learnweb.user.Organisation;
import de.l3s.learnweb.user.User;
import de.l3s.util.StringHelper;
@@ -51,6 +78,7 @@ public class SearchBean extends ApplicationBean implements Serializable {
private static final Logger log = LogManager.getLogger(SearchBean.class);
private static final int MIN_RESOURCES_PER_GROUP = 2;
+ private static final int RESULT_LIMIT = 32;
// Values from views are stored here
private String query = "";
@@ -71,6 +99,19 @@ public class SearchBean extends ApplicationBean implements Serializable {
private int counter = 0;
private transient List resourcesGroupedBySource;
+ private Boolean isUserActive;
+    private List<Boolean> snippetClicked;
+    private List<String> recommendations;
+ private boolean showRecommendations;
+ private boolean showRelatedQueries;
+
+ private transient String edurecRequest;
+ private transient String suggestedEntries;
+
+ @Inject
+ private PKGraphDao pkGraphDao;
+ @Inject
+ private DbpediaSpotlightService dbpediaSpotlightService;
@PostConstruct
public void init() {
@@ -78,7 +119,7 @@ public void init() {
searchFilters = new SearchFilters(SearchMode.text);
}
- public void onLoad() {
+ public void onLoad() throws InterruptedException {
BeanAssert.authorized(isLoggedIn());
log.debug("mode/action: {}; filter: {} - service: {}; query:{}", queryMode, queryFilters, queryService, query);
@@ -100,11 +141,19 @@ public void onLoad() {
onSearch();
Servlets.setNoCacheHeaders(Faces.getResponse());
+ showRecommendations = !getUser().getOrganisation().getOption(Organisation.Option.Search_Disable_recommendations);
+ showRelatedQueries = !getUser().getOrganisation().getOption(Organisation.Option.Search_Disable_related_searches);
+
+ isUserActive = false;
+ snippetClicked = new ArrayList<>();
+ for (int i = 0; i < RESULT_LIMIT; i++) {
+ snippetClicked.add(false);
+ }
}
// -------------------------------------------------------------------------
- public String onSearch() {
+ private String onSearch() throws InterruptedException {
// search if a query is given and (it was not searched before or the query or search mode has been changed)
if (!StringUtils.isEmpty(query) && (null == search || !query.equals(search.getQuery()) || searchMode != search.getMode() || !queryService.equals(searchService.name()))) {
if (null != search) {
@@ -137,6 +186,10 @@ public String onSearch() {
log(Action.searching, 0, search.getId(), query);
resourcesGroupedBySource = null;
+
+ if (showRecommendations) {
+ createSearchRecommendation();
+ }
}
return "/lw/search.xhtml?faces-redirect=true";
@@ -236,6 +289,116 @@ public void commandOnResourceSelect() {
setSelectedResource(resource);
}
+ public void suggestQueries() throws IOException, InterruptedException {
+ DialogFrameworkOptions options = DialogFrameworkOptions.builder()
+ .modal(true)
+ .draggable(false)
+ .resizable(false)
+ .closeOnEscape(true)
+ .onHide("const f = $('#navbar_form\\\\:searchfield'); if (f) {f.data.bypass=1};")
+ .build();
+
+        List<String> suggestedBing = getBingSuggestQueries(query);
+        List<String> suggestedEduRec = getEduRecSuggestQueries(query);
+ log.debug("Suggested queries: bing: {}, edurec: {}", suggestedBing, suggestedEduRec);
+
+        final List<SuggestedQuery> queries = new ArrayList<>();
+ if (suggestedBing != null) {
+ int index = 1;
+ suggestedBing = suggestedBing.subList(0, Math.min(5, suggestedBing.size()));
+ for (String query : suggestedBing) {
+ queries.add(new SuggestedQuery(index++, "bing", query));
+ }
+ }
+ if (suggestedEduRec != null) {
+ int index = 101;
+ suggestedEduRec = suggestedEduRec.subList(0, Math.min(5, suggestedEduRec.size()));
+ for (String query : suggestedEduRec) {
+ queries.add(new SuggestedQuery(index++, "edurec", query));
+ }
+ }
+ Collections.shuffle(queries);
+        final List<SuggestedQuery> randQueries = new ArrayList<>();
+ int index = 1;
+ for (SuggestedQuery query : queries) {
+ randQueries.add(new SuggestedQuery(query.id(), index++, query.source(), query.query()));
+ }
+ suggestedEntries = new Gson().toJson(randQueries);
+
+ FacesContext.getCurrentInstance().getExternalContext().getFlash().put("queries", randQueries);
+ PrimeFaces.current().dialog().openDynamic("/dialogs/suggestQueries.jsf", options, null);
+ }
+
+    public void onSuggestedQuerySelected(SelectEvent<SuggestedQuery> event) {
+        SuggestedQuery query = event.getObject();
+ log.debug("Selected suggested query: {}", query);
+ pkGraphDao.insertSuggestedQuery(getUser(), getQuery(), query.query(), query.source(), query.index(), suggestedEntries, edurecRequest);
+
+ Faces.redirect("/lw/search.jsf?action=" + queryMode + "&service=" + queryService + "&query=" + URLEncoder.encode(query.query(), StandardCharsets.UTF_8));
+ }
+
+    private List<String> getBingSuggestQueries(String query) throws IOException, InterruptedException {
+ final URI requestUri = URI.create("https://api.bing.com/osjson.aspx?query=" + URLEncoder.encode(query, StandardCharsets.UTF_8));
+
+ HttpRequest request = HttpRequest.newBuilder().GET().header("Content-type", "application/json").uri(requestUri).build();
+        HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
+
+ if (response.statusCode() == HttpURLConnection.HTTP_OK) {
+ JsonArray jsonObject = JsonParser.parseString(response.body()).getAsJsonArray();
+ JsonArray jsonArray = jsonObject.get(1).getAsJsonArray();
+            List<String> queries = new ArrayList<>();
+ for (JsonElement jsonElement : jsonArray) {
+ queries.add(jsonElement.getAsString());
+ }
+ return queries;
+ }
+
+ return null;
+ }
+
+    private List<String> getEduRecSuggestQueries(String query) throws IOException, InterruptedException {
+ final String requestUrl = "https://edurec.kevinhaller.dev/recommend/5/items";
+
+ JsonArray nodesArray = new JsonArray();
+ JsonObject recordObject = new JsonObject();
+ recordObject.add("nodes", nodesArray);
+ JsonObject rootObject = new JsonObject();
+ rootObject.add("record", recordObject);
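+        // Request body shape: {"record": {"nodes": [{"uri": ..., "query": ..., "weight": ...}, ...]}}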
+
+ PKGraph pkg = getUserBean().getUserPkg();
+ JsonSharedObject request = pkg.prepareCollabRec(10, 10);
+ if (request != null) {
+ for (JsonSharedObject.Entity entity : request.getEntities()) {
+ JsonObject graphNode = new JsonObject();
+ graphNode.addProperty("uri", entity.getUri());
+ graphNode.addProperty("query", entity.getQuery());
+ graphNode.addProperty("weight", entity.getWeight());
+ nodesArray.add(graphNode);
+ }
+ }
+ edurecRequest = new Gson().toJson(rootObject);
+
+ HttpClient client = HttpClient.newHttpClient();
+
+ HttpRequest.Builder requestBuilder = HttpRequest.newBuilder();
+ requestBuilder.uri(URI.create(requestUrl));
+ requestBuilder.header("Content-Type", "application/json");
+ requestBuilder.POST(HttpRequest.BodyPublishers.ofString(edurecRequest));
+
+        HttpResponse<String> response = client.send(requestBuilder.build(), HttpResponse.BodyHandlers.ofString());
+
+ JsonObject jsonRoot = JsonParser.parseString(response.body()).getAsJsonObject();
+ JsonArray suggestions = jsonRoot.getAsJsonArray("list");
+
+        List<String> queries = new ArrayList<>();
+ for (JsonElement suggestion : suggestions) {
+ final String value = suggestion.getAsJsonObject().get("iri").getAsString();
+ queries.add(value.substring(value.lastIndexOf('/') + 1).replace('_', ' '));
+ }
+
+ return queries;
+ }
+
/**
* This method logs a resource click event.
*/
@@ -243,7 +406,17 @@ public void commandOnResourceClick() {
try {
Map params = Faces.getRequestParameterMap();
int tempResourceId = Integer.parseInt(params.get("resourceId"));
-
+ isUserActive = true;
+            if (!snippetClicked.get(tempResourceId)) {
+                Resource resource = search.getResources().get(tempResourceId).getResource();
+                List<RecognisedEntity> recognisedEntities = dbpediaSpotlightService.storeStreamAndExtractEntities(getUser(), "web", search.getId(), resource.getUrl());
+ int inputId = dbpediaSpotlightService.storeEntities(getSessionId(), getUser(), recognisedEntities);
+
+ getUserBean().getUserPkg().addRdfStatement("schema:WebPage/" + inputId, "schema:title", resource.getTitle(), "literal");
+ getUserBean().getUserPkg().addRdfStatement("schema:WebPage/" + inputId, "schema:url", resource.getUrl(), "resource");
+ getUserBean().getUserPkg().addRdfStatement("SearchQuery/" + search.getId(), "generatesResult", "schema:WebPage/" + inputId, "resource");
+ }
+            snippetClicked.set(tempResourceId, true);
search.logResourceClicked(tempResourceId, getUser());
} catch (Exception e) {
log.error("Can't log resource opened event", e);
@@ -251,7 +424,89 @@ public void commandOnResourceClick() {
}
/**
- * True if a the user has started a search request.
+     * Called when the user unloads the page.
+     * If the user has actively searched, runs dbpedia-spotlight recognition on the query and all snippets.
+ */
+ @PreDestroy
+ public void destroy() throws Exception {
+ if (isUserActive) {
+            List<RecognisedEntity> queryEntities = dbpediaSpotlightService.storeStreamAndExtractEntities(getUser(), "query", search.getId(), search.getQuery());
+ dbpediaSpotlightService.storeEntities(getSessionId(), getUser(), queryEntities);
+
+ for (ResourceDecorator snippet : search.getResources()) {
+ String s = snippet.getTitle().split("\\|")[0].split("-")[0];
+ String type = snippetClicked.get(search.getResources().indexOf(snippet)) ? "snippet_clicked" : "snippet_not_clicked";
+
+                List<RecognisedEntity> snippetEntities = dbpediaSpotlightService.storeStreamAndExtractEntities(getUser(), type, search.getId(), "<headline>" + s + "</headline> " + snippet.getDescription());
+ int inputId = dbpediaSpotlightService.storeEntities(getSessionId(), getUser(), snippetEntities);
+
+ getUserBean().getUserPkg().addRdfStatement("Snippet/" + inputId, "schema:title", s, "literal");
+ getUserBean().getUserPkg().addRdfStatement("Snippet/" + inputId, "schema:url", snippet.getUrl(), "literal");
+ getUserBean().getUserPkg().addRdfStatement("SearchQuery/" + search.getId(), "generatesResult", "Snippet/" + inputId, "resource");
+ }
+
+ // getUserBean().getUserPkg().removeDuplicatingNodesAndLinks();
+ // Update one for recommendation, one for collabGraph which marks the user's active state.
+ // FIXME: it seems we hardcode to use only the first group
+ // getUserBean().getUserPkg().createSharedObject(getUser(), getUser().getGroups().get(0).getId(), 5, false, "recommendation");
+ }
+ }
+
+ /**
+ * Create a small recommender system for the current search query.
+     * Finds the top 3 entities, by weight in the Pkg, from other users' shared objects,
+     * excluding entities this user already has.
+ */
+ private void createSearchRecommendation() {
+ //Initialization
+ recommendations = new ArrayList<>();
+        List<JsonSharedObject> sharedObjects = pkGraphDao.findObjectsByUserId(getUser().getId(), "recommendation");
+ if (sharedObjects == null) {
+ return;
+ }
+        Map<String, Double> entityRank = new HashMap<>();
+        List<JsonSharedObject.Entity> chosenEntities = new ArrayList<>();
+
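+        // From other users' shared objects, keep one entity per query, retaining the highest weight seen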
+ for (JsonSharedObject sharedObject : sharedObjects) {
+ if (sharedObject.getUser().getId() != getUser().getId()) {
+ for (JsonSharedObject.Entity entity : sharedObject.getEntities()) {
+ if (chosenEntities.stream().anyMatch(s -> s.getQuery().equals(entity.getQuery()))) {
+ chosenEntities.stream()
+ .filter(s -> s.getQuery().equals(entity.getQuery()))
+ .findFirst()
+ .filter(s -> s.getWeight() < entity.getWeight())
+ .ifPresent(s -> s.setWeight(entity.getWeight()));
+ } else {
+ chosenEntities.add(entity);
+ }
+ }
+ }
+ }
+ for (JsonSharedObject sharedObject : sharedObjects) {
+ if (sharedObject.getUser().getId() == getUser().getId()) {
+ for (JsonSharedObject.Entity entity : sharedObject.getEntities()) {
+ chosenEntities.removeIf(s -> (Objects.equals(s.getUri(), entity.getUri())));
+ }
+ }
+ }
+ for (JsonSharedObject.Entity entity : chosenEntities) {
+ entityRank.put(entity.getQuery(), entity.getWeight());
+ }
+
+ //entries list will be used to store and sort the entities based on their weights
+        List<Map.Entry<String, Double>> entries = new ArrayList<>(entityRank.entrySet());
+ //No entries are found then we don't need to display the results
+ if (entries.isEmpty()) {
+ return;
+ }
+ entries.sort((o1, o2) -> o2.getValue().compareTo(o1.getValue()));
+
+        //Take the first 3 entities of the results, or all of them if there are fewer than 3
+        recommendations = entries.stream().map(Map.Entry::getKey).limit(3).collect(Collectors.toList());
+ }
+
+ /**
+ * True if the user has started a search request.
*/
public boolean isSearched() {
return search != null;
@@ -338,6 +593,10 @@ public String createFilterUrl(FilterType filterType, String value) {
return sb.toString();
}
+    public List<String> getRecommendations() {
+ return recommendations;
+ }
+
public Search getSearch() {
return search;
}
@@ -425,4 +684,12 @@ public List getResourcesGroupedBySource() {
}
return resourcesGroupedBySource;
}
+
+ public boolean isShowRecommendations() {
+ return showRecommendations;
+ }
+
+ public boolean isShowRelatedQueries() {
+ return showRelatedQueries;
+ }
}
diff --git a/src/main/java/de/l3s/learnweb/resource/search/SuggestQueryDialog.java b/src/main/java/de/l3s/learnweb/resource/search/SuggestQueryDialog.java
new file mode 100644
index 000000000..cf19ca1d0
--- /dev/null
+++ b/src/main/java/de/l3s/learnweb/resource/search/SuggestQueryDialog.java
@@ -0,0 +1,45 @@
+package de.l3s.learnweb.resource.search;
+
+import java.io.Serial;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import jakarta.annotation.PostConstruct;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Named;
+
+import org.primefaces.PrimeFaces;
+import org.primefaces.event.SelectEvent;
+
+@Named
+@ViewScoped
+public class SuggestQueryDialog implements Serializable {
+ @Serial
+ private static final long serialVersionUID = 7572402402655403989L;
+
+ private SuggestedQuery query;
+    private List<SuggestedQuery> queries = new ArrayList<>();
+
+ @PostConstruct
+ public void init() {
+        this.queries = (List<SuggestedQuery>) FacesContext.getCurrentInstance().getExternalContext().getFlash().get("queries");
+ }
+
+ public void setQuery(final SuggestedQuery query) {
+ this.query = query;
+ }
+
+ public SuggestedQuery getQuery() {
+ return query;
+ }
+
+    public List<SuggestedQuery> getQueries() {
+ return queries;
+ }
+
+    public void onQuerySelect(SelectEvent<SuggestedQuery> query) {
+ PrimeFaces.current().dialog().closeDynamic(query.getObject());
+ }
+}
diff --git a/src/main/java/de/l3s/learnweb/resource/search/SuggestedQuery.java b/src/main/java/de/l3s/learnweb/resource/search/SuggestedQuery.java
new file mode 100644
index 000000000..5e1c0cc53
--- /dev/null
+++ b/src/main/java/de/l3s/learnweb/resource/search/SuggestedQuery.java
@@ -0,0 +1,9 @@
+package de.l3s.learnweb.resource.search;
+
+import java.io.Serializable;
+
+public record SuggestedQuery(int id, int index, String source, String query) implements Serializable {
+ public SuggestedQuery(int id, String source, String query) {
+ this(id, 0, source, query);
+ }
+}
diff --git a/src/main/java/de/l3s/learnweb/searchhistory/SearchHistoryBean.java b/src/main/java/de/l3s/learnweb/searchhistory/SearchHistoryBean.java
index 040c8edf8..ea4bc77fd 100644
--- a/src/main/java/de/l3s/learnweb/searchhistory/SearchHistoryBean.java
+++ b/src/main/java/de/l3s/learnweb/searchhistory/SearchHistoryBean.java
@@ -1,7 +1,10 @@
package de.l3s.learnweb.searchhistory;
+import java.io.IOException;
import java.io.Serial;
import java.io.Serializable;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -15,9 +18,19 @@
import org.apache.commons.lang3.StringUtils;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.TypeAdapter;
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonWriter;
+
import de.l3s.learnweb.beans.ApplicationBean;
import de.l3s.learnweb.beans.BeanAssert;
+import de.l3s.learnweb.group.GroupDao;
+import de.l3s.learnweb.pkg.JsonSharedObject;
+import de.l3s.learnweb.pkg.PKGraph;
import de.l3s.learnweb.resource.ResourceDecorator;
+import de.l3s.learnweb.pkg.CollabGraph;
import de.l3s.learnweb.user.User;
import de.l3s.learnweb.user.UserDao;
@@ -39,19 +52,27 @@ public class SearchHistoryBean extends ApplicationBean implements Serializable {
@Inject
private UserDao userDao;
-
+ @Inject
+ private GroupDao groupDao;
@Inject
private SearchHistoryDao searchHistoryDao;
+ private static final String patternDate = "yyyy-MM-dd";
+ private static final String patternTime = "HH:mm:ss";
+ private static final String patternDateTime = String.format("%s %s", patternDate, patternTime);
+    private transient List<JsonSharedObject> sharedObjects = new ArrayList<>();
+ private transient Gson gson;
+
/**
* Load the variables that needs values before the view is rendered.
*/
- public void onLoad() {
+ public void onLoad() throws IOException, InterruptedException {
BeanAssert.authorized(isLoggedIn());
if (selectedUserId == 0) {
selectedUserId = getUser().getId();
}
+ gson = new GsonBuilder().registerTypeAdapter(LocalDateTime.class, new LocalDateTimeAdapter().nullSafe()).create();
}
public SearchQuery getSelectedQuery() {
@@ -142,6 +163,7 @@ public void setSelectedGroupId(int selectedGroupId) {
//log.info("selected group id: " + selectedGroupId);
this.selectedGroupId = selectedGroupId;
+ calculateEntities();
}
public void search() {
@@ -190,4 +212,81 @@ public String getSearchQuery() {
public void setSearchQuery(final String searchQuery) {
this.searchQuery = searchQuery;
}
+
+    public static class LocalDateTimeAdapter extends TypeAdapter<LocalDateTime> {
+ DateTimeFormatter format = DateTimeFormatter.ofPattern(patternDateTime);
+
+ @Override
+ public void write(JsonWriter out, LocalDateTime value) throws IOException {
+ if (value != null) {
+ out.value(value.format(format));
+ }
+ }
+
+ @Override
+ public LocalDateTime read(JsonReader in) throws IOException {
+ return LocalDateTime.parse(in.nextString(), format);
+ }
+ }
+
+ /**
+ * Calculates the top entities for each user belonging to the selected group and
+ * collects them as shared objects; also exports an RDF Turtle file for every user in the group.
+ */
+ private void calculateEntities() {
+ // For testing only
+ // userPkg.createSharedObject(getUser(), selectedGroupId, 5, true, "negative5SharedObject");
+ // userPkg.createSharedObject(getUser(), selectedGroupId, 10, false, "positive10SharedObject");
+
+ sharedObjects = new ArrayList<>();
+ for (User user : userDao.findByGroupId(selectedGroupId)) {
+ PKGraph userPkg = PKGraph.createPkg(user);
+ JsonSharedObject object = userPkg.createSharedObject(user, selectedGroupId, 3, false, "collabGraph");
+
+ if (object != null) {
+ sharedObjects.add(object);
+ }
+ }
+ }
+
+ /**
+ * Creates the CollabGraph from the group's shared objects for the visualisation.
+ *
+ * @return the JSON string of the CollabGraph, or null if there is nothing to show
+ */
+ public String getQueriesJson() {
+ if (sessions == null || sessions.isEmpty() || selectedGroupId <= 0) {
+ return null;
+ }
+ // Build the CollabGraph from the collected shared objects
+ CollabGraph calculatedQuery = new CollabGraph(new ArrayList<>(), new ArrayList<>()).createCollabGraph(sharedObjects);
+ // Serialise it for the visualisation
+ return gson.toJson(calculatedQuery);
+ }
+
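+ /**
+ * Exports the PKG of the currently viewed user as a single-user graph.
+ *
+ * @return the JSON string of the graph, or an empty string if the user has no entities yet
+ */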
+ public String getSingleQueryJson() {
+ PKGraph userPkg = getUserBean().getUserPkg();
+ if (getCurrentUser().getId() != getUser().getId()) {
+ userPkg = PKGraph.createPkg(getCurrentUser());
+ }
+
+ JsonSharedObject obj = userPkg.createSingleGraph();
+ if (obj == null) {
+ return "";
+ }
+ CollabGraph calculatedQuery = new CollabGraph(new ArrayList<>(), new ArrayList<>()).createSingleGraph(obj);
+ return gson.toJson(calculatedQuery);
+ }
+
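+ /**
+ * Builds the PKG of the currently viewed user and serialises its RDF model,
+ * or returns an empty string if the user has no entities yet.
+ */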
+ public String getRdfModel() {
+ PKGraph userPkg = getUserBean().getUserPkg();
+ if (getCurrentUser().getId() != getUser().getId()) {
+ userPkg = PKGraph.createPkg(getCurrentUser());
+ }
+
+ JsonSharedObject obj = userPkg.createSingleGraph();
+ if (obj == null) {
+ return "";
+ }
+ return userPkg.getRdfGraph().printModel();
+ }
}
diff --git a/src/main/java/de/l3s/learnweb/searchhistory/SearchHistoryDao.java b/src/main/java/de/l3s/learnweb/searchhistory/SearchHistoryDao.java
index 92eb4431f..6f47a40c2 100644
--- a/src/main/java/de/l3s/learnweb/searchhistory/SearchHistoryDao.java
+++ b/src/main/java/de/l3s/learnweb/searchhistory/SearchHistoryDao.java
@@ -73,9 +73,9 @@ default List<ResourceDecorator> findSearchResultsByQuery(SearchQuery query, int
rd.setSaved(rs.getInt("saved") > 0);
// quick fix to not show annotations for each result of same url
- if (rd.getClicked()) {
- rd.setAnnotations(findAnnotationsByUserIdAndUrl(res.getUserId(), res.getUrl()));
- }
+ // if (rd.getClicked()) {
+ // rd.setAnnotations(findAnnotationsByUserIdAndUrl(res.getUserId(), res.getUrl()));
+ // }
return rd;
}).list();
}
diff --git a/src/main/java/de/l3s/learnweb/user/Organisation.java b/src/main/java/de/l3s/learnweb/user/Organisation.java
index ad853f344..0efa1fc6b 100644
--- a/src/main/java/de/l3s/learnweb/user/Organisation.java
+++ b/src/main/java/de/l3s/learnweb/user/Organisation.java
@@ -55,6 +55,8 @@ public enum Option {
Resource_Disallow_editing,
Resource_Disable_video_preview,
Search_Chat_enabled,
+ Search_Disable_recommendations,
+ Search_Disable_related_searches,
}
private int id;
diff --git a/src/main/java/de/l3s/learnweb/user/ProfileBean.java b/src/main/java/de/l3s/learnweb/user/ProfileBean.java
index d26685793..a2e44f3e1 100644
--- a/src/main/java/de/l3s/learnweb/user/ProfileBean.java
+++ b/src/main/java/de/l3s/learnweb/user/ProfileBean.java
@@ -33,6 +33,8 @@
+import de.l3s.dbpedia.DbpediaSpotlightService;
+import de.l3s.dbpedia.RecognisedEntity;
import de.l3s.learnweb.logging.Action;
import de.l3s.learnweb.resource.File;
import de.l3s.learnweb.resource.FileDao;
import de.l3s.learnweb.user.User.Gender;
import de.l3s.util.Image;
@@ -76,6 +78,9 @@ public class ProfileBean extends ApplicationBean implements Serializable {
@Inject
private GroupDao groupDao;
+ @Inject
+ private DbpediaSpotlightService dbpediaSpotlightService;
+
public void onLoad() {
User loggedInUser = getUser();
BeanAssert.authorized(loggedInUser);
@@ -137,7 +142,7 @@ public void handleFileUpload(FileUploadEvent event) {
}
}
- public void onSaveProfile() {
+ public void onSaveProfile() throws Exception {
// send confirmation mail if mail has been changed
if (StringUtils.isNotEmpty(email) && !StringUtils.equals(selectedUser.getEmail(), email)) {
selectedUser.setEmail(email);
@@ -151,6 +156,11 @@ public void onSaveProfile() {
userDao.save(selectedUser);
+ // FIXME: check if changed before
+ // Call dbpedia-spotlight recognition
+ List<RecognisedEntity> recognisedEntities = dbpediaSpotlightService.storeStreamAndExtractEntities(getUser(), "user", selectedUser.getId(), selectedUser.getInterest() + " " + selectedUser.getProfession());
+ dbpediaSpotlightService.storeEntities(getSessionId(), selectedUser, recognisedEntities);
+
log(Action.changing_profile, 0, selectedUser.getId());
addGrowl(FacesMessage.SEVERITY_INFO, "changes_saved");
}
diff --git a/src/main/java/de/l3s/learnweb/user/UserBean.java b/src/main/java/de/l3s/learnweb/user/UserBean.java
index ddb7098c0..ff92f077f 100644
--- a/src/main/java/de/l3s/learnweb/user/UserBean.java
+++ b/src/main/java/de/l3s/learnweb/user/UserBean.java
@@ -36,6 +36,7 @@
import de.l3s.learnweb.group.Group;
import de.l3s.learnweb.i18n.MessagesBundle;
import de.l3s.learnweb.resource.survey.SurveyResponse;
+import de.l3s.learnweb.pkg.PKGraph;
import de.l3s.learnweb.user.Organisation.Option;
import de.l3s.util.HasId;
import de.l3s.util.StringHelper;
@@ -53,6 +54,7 @@ public class UserBean implements Serializable {
private ColorTheme colorTheme = ColorTheme.emerald;
 private final HashMap<String, String> anonymousPreferences = new HashMap<>(); // preferences for users who are not logged in
+ private transient PKGraph pkg;
private transient User user; // to avoid inconsistencies with the user cache the UserBean does not store the user itself
private transient User moderatorUser; // in this field we store a moderator account while the moderator is logged in on another account
private transient Organisation activeOrganisation;
@@ -75,6 +77,13 @@ public void init() {
}
}
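+ /**
+ * Lazily creates the personal knowledge graph (PKG) of the logged-in user;
+ * returns null for anonymous users.
+ */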
+ public PKGraph getUserPkg() {
+ if (pkg == null && isLoggedIn()) {
+ pkg = PKGraph.createPkg(getUser());
+ }
+ return pkg;
+ }
+
/**
* This method sets values which are required by the Download Servlet
* and provides data which is shown on the Tomcat manager session page.
diff --git a/src/main/java/de/l3s/learnweb/user/UserDao.java b/src/main/java/de/l3s/learnweb/user/UserDao.java
index 7f04b3a57..67676f581 100644
--- a/src/main/java/de/l3s/learnweb/user/UserDao.java
+++ b/src/main/java/de/l3s/learnweb/user/UserDao.java
@@ -81,6 +81,9 @@ default Optional<User> findByUsernameAndPassword(String username, String password) {
@SqlQuery("SELECT u.* FROM lw_user u JOIN lw_course_user USING(user_id) WHERE course_id = ? AND deleted = 0 ORDER BY username")
 List<User> findByCourseId(int courseId);
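+ /**
+ * Checks whether the user has clicked at least one search result, i.e. has a
+ * 'resource_clicked' row in learnweb_large.sl_action.
+ */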
+ @SqlQuery("SELECT EXISTS (SELECT 1 FROM learnweb_large.sl_action WHERE user_id = ? AND `action` = 'resource_clicked')")
+ boolean isActiveUser(int userId);
+
@SqlQuery("SELECT u.* FROM lw_user u JOIN lw_group_user USING(user_id) WHERE group_id = ? AND deleted = 0 ORDER BY username")
 List<User> findByGroupId(int groupId);
diff --git a/src/main/resources/db/migration/V14__Update_annotations.sql b/src/main/resources/db/migration/V14__Update_annotations.sql
new file mode 100644
index 000000000..02798641b
--- /dev/null
+++ b/src/main/resources/db/migration/V14__Update_annotations.sql
@@ -0,0 +1,173 @@
+/* ================= `learnweb_large` schema ================= */
+CREATE SCHEMA IF NOT EXISTS `learnweb_large`;
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`sl_action` (
+ `search_id` INT(10) UNSIGNED NOT NULL,
+ `rank` SMALLINT(5) UNSIGNED NOT NULL,
+ `user_id` INT(10) UNSIGNED NOT NULL,
+ `action` ENUM ('resource_clicked','resource_saved') NOT NULL,
+ `timestamp` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP() ON UPDATE CURRENT_TIMESTAMP(),
+ KEY `sl_action_search_id` (`search_id`)
+ );
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`sl_query` (
+ `search_id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY,
+ `group_id` INT(10) UNSIGNED DEFAULT 0,
+ `query` VARCHAR(250) NOT NULL,
+ `mode` ENUM ('text','image','video','group') NOT NULL,
+ `service` ENUM ('bing','flickr','giphy','youtube','vimeo','ipernity','ted','tedx','loro','yovisto','learnweb','archiveit','teded','factcheck','desktop','internet','slideshare','speechrepository') NOT NULL,
+ `language` CHAR(5) DEFAULT NULL,
+ `filters` VARCHAR(1000) DEFAULT NULL,
+ `user_id` INT(10) UNSIGNED NOT NULL,
+ `timestamp` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP() ON UPDATE CURRENT_TIMESTAMP(),
+ `learnweb_version` TINYINT(4) NOT NULL DEFAULT 0 COMMENT 'which learnweb instance has inserted this row'
+ );
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`sl_resource` (
+ `search_id` INT(10) UNSIGNED NOT NULL,
+ `rank` SMALLINT(5) UNSIGNED NOT NULL,
+ `resource_id` INT(10) UNSIGNED DEFAULT NULL COMMENT 'id of a learnweb resource NULL otherwise',
+ `url` VARCHAR(1000) DEFAULT NULL COMMENT 'null if learnweb resource',
+ `title` VARCHAR(250) DEFAULT NULL COMMENT 'null if learnweb resource',
+ `description` VARCHAR(1000) DEFAULT NULL COMMENT 'null if learnweb resource',
+ `thumbnail_url` VARCHAR(1000) DEFAULT NULL,
+ `thumbnail_height` SMALLINT(5) UNSIGNED DEFAULT NULL,
+ `thumbnail_width` SMALLINT(5) UNSIGNED DEFAULT NULL,
+ PRIMARY KEY (`search_id`, `rank`)
+ );
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`speechrepository_video` (
+ `id` INT(11) NOT NULL PRIMARY KEY,
+ `title` VARCHAR(1000) NOT NULL,
+ `url` VARCHAR(1000) NOT NULL,
+ `rights` VARCHAR(1000) NOT NULL,
+ `date` VARCHAR(1000) NOT NULL,
+ `description` VARCHAR(1000) NOT NULL,
+ `notes` VARCHAR(2000) DEFAULT NULL,
+ `image_link` VARCHAR(1000) NOT NULL,
+ `video_link` VARCHAR(1000) NOT NULL,
+ `duration` INT(11) NOT NULL,
+ `language` VARCHAR(1000) NOT NULL,
+ `level` VARCHAR(1000) DEFAULT NULL,
+ `use` VARCHAR(1000) DEFAULT NULL,
+ `type` VARCHAR(1000) DEFAULT NULL,
+ `domains` VARCHAR(1000) DEFAULT NULL,
+ `terminology` TEXT DEFAULT NULL,
+ `learnweb_resource_id` INT(10) UNSIGNED NOT NULL DEFAULT 0,
+ KEY `speechrepository_video_learnweb_resource_id` (`learnweb_resource_id`)
+ );
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`ted_transcripts_lang_mapping` (
+ `language_code` CHAR(10) NOT NULL,
+ `language` CHAR(25) NOT NULL,
+ PRIMARY KEY (`language_code`, `language`)
+ );
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`ted_transcripts_paragraphs` (
+ `resource_id` INT(10) UNSIGNED NOT NULL,
+ `language` CHAR(10) NOT NULL,
+ `starttime` INT(10) UNSIGNED NOT NULL,
+ `paragraph` LONGTEXT NOT NULL,
+ KEY `ted_transcripts_paragraphs_resource_id` (`resource_id`, `language`)
+ );
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`ted_video` (
+ `ted_id` INT(10) UNSIGNED NOT NULL DEFAULT 0 PRIMARY KEY,
+ `resource_id` INT(10) UNSIGNED NOT NULL DEFAULT 0,
+ `title` VARCHAR(200) NOT NULL,
+ `description` MEDIUMTEXT NOT NULL,
+ `slug` VARCHAR(200) NOT NULL,
+ `viewed_count` INT(10) UNSIGNED NOT NULL,
+ `published_at` TIMESTAMP NULL DEFAULT NULL,
+ `talk_updated_at` TIMESTAMP NULL DEFAULT NULL,
+ `photo1_url` VARCHAR(255) DEFAULT NULL,
+ `photo1_width` SMALLINT(6) UNSIGNED NOT NULL DEFAULT 0,
+ `photo1_height` SMALLINT(6) UNSIGNED NOT NULL DEFAULT 0,
+ `photo2_url` VARCHAR(255) DEFAULT NULL,
+ `photo2_width` SMALLINT(6) NOT NULL DEFAULT 0,
+ `photo2_height` SMALLINT(6) NOT NULL DEFAULT 0,
+ `tags` MEDIUMTEXT NOT NULL,
+ `duration` SMALLINT(6) UNSIGNED NOT NULL DEFAULT 0,
+ `json` MEDIUMTEXT DEFAULT NULL,
+ KEY `ted_video_slug` (`slug`)
+ );
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`wb2_url` (
+ `url_id` BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY,
+ `url` VARCHAR(200) NOT NULL,
+ `first_capture` TIMESTAMP NULL DEFAULT NULL,
+ `last_capture` TIMESTAMP NULL DEFAULT NULL,
+ `all_captures_fetched` BOOLEAN NOT NULL DEFAULT 0 COMMENT '1 when all captures have been loaded into wb_url_capture; 0 else',
+ `update_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP(),
+ KEY `wb2_url_url` (`url`)
+ );
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`wb2_url_capture` (
+ `url_id` BIGINT(20) NOT NULL,
+ `timestamp` TIMESTAMP NULL DEFAULT NULL,
+ KEY `wb2_url_capture_url_id` (`url_id`)
+ );
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`wb_url` (
+ `url_id` BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY,
+ `url` VARCHAR(200) NOT NULL,
+ `first_capture` TIMESTAMP NULL DEFAULT NULL,
+ `last_capture` TIMESTAMP NULL DEFAULT NULL,
+ `all_captures_fetched` BOOLEAN NOT NULL DEFAULT 0 COMMENT '1 when all captures have been loaded into wb_url_capture; 0 else',
+ `crawl_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP(),
+ `status_code` SMALLINT(6) NOT NULL DEFAULT -3,
+ `status_code_date` TIMESTAMP NOT NULL DEFAULT '1990-01-01 01:00:00',
+ UNIQUE KEY `wb_url_url_index` (`url`)
+ );
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`wb_url_content` (
+ `url_id` BIGINT(20) NOT NULL,
+ `status_code` SMALLINT(6) NOT NULL DEFAULT -3,
+ `content` LONGTEXT DEFAULT NULL,
+ `date` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP() ON UPDATE CURRENT_TIMESTAMP(),
+ KEY `wb_url_content_url_id` (`url_id`)
+ );
+
+/* ================= Annotation tables ================= */
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`sl_recognised_entity` (
+ `entity_uri` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
+ `uri` VARCHAR(250) NOT NULL,
+ `user_id` INT(10) UNSIGNED NOT NULL,
+ `type` VARCHAR(250) NOT NULL,
+ `surface_form` VARCHAR(250) NOT NULL,
+ `session_id` VARCHAR(10000) NOT NULL,
+ `confidence` DOUBLE NOT NULL,
+ `input_id` TEXT NOT NULL,
+ `created_at` TIMESTAMP NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`sl_shared_object` ( /* TODO: redesign, flatten object and split into entity, link, entity_user_weight */
+ `id` INT UNSIGNED NOT NULL PRIMARY KEY AUTO_INCREMENT,
+ `user_id` INT NOT NULL,
+ `group_id` INT NOT NULL,
+ `application` VARCHAR(250) NOT NULL,
+ `shared_object` VARCHAR(10000) NOT NULL,
+ `created_at` DATE NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`sl_rdf` (
+ `id` INT UNSIGNED NOT NULL PRIMARY KEY AUTO_INCREMENT,
+ `user_id` INT NOT NULL,
+ `group_id` INT NOT NULL,
+ `rdf_value` LONGTEXT NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`sl_input_stream` (
+ `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
+ `user_id` INT NOT NULL,
+ `type` VARCHAR(250) NOT NULL,
+ `object_id` INT UNSIGNED NOT NULL,
+ `content` LONGTEXT NOT NULL,
+ `date_created` DATE NOT NULL
+);
+
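+/* Maps a search (sl_query.search_id) to the entities recognised in it (sl_recognised_entity.entity_uri) */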
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`sl_search_entity` (
+ `search_id` INT NOT NULL,
+ `entity_uri` INT NOT NULL
+);
+
diff --git a/src/main/resources/db/migration/V15__Add_suggested_logs.sql b/src/main/resources/db/migration/V15__Add_suggested_logs.sql
new file mode 100644
index 000000000..ffb13207a
--- /dev/null
+++ b/src/main/resources/db/migration/V15__Add_suggested_logs.sql
@@ -0,0 +1,14 @@
+/* ================= `learnweb_large` schema ================= */
+CREATE SCHEMA IF NOT EXISTS `learnweb_large`;
+
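+/* One row per suggested query (related search or recommendation) shown alongside a reference query */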
+CREATE TABLE IF NOT EXISTS `learnweb_large`.`sl_suggested_query` (
+ `search_id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY,
+ `user_id` INT(10) UNSIGNED NOT NULL,
+ `reference_query` VARCHAR(255) NOT NULL,
+ `query` VARCHAR(255) NOT NULL,
+ `source` VARCHAR(255) NOT NULL,
+ `index` INT(10) NOT NULL,
+ `options` TEXT DEFAULT NULL,
+ `graph` TEXT DEFAULT NULL,
+ `timestamp` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP() ON UPDATE CURRENT_TIMESTAMP()
+);
diff --git a/src/main/resources/de/l3s/learnweb/lang/messages.properties b/src/main/resources/de/l3s/learnweb/lang/messages.properties
index 79641fc12..867ce0b18 100644
--- a/src/main/resources/de/l3s/learnweb/lang/messages.properties
+++ b/src/main/resources/de/l3s/learnweb/lang/messages.properties
@@ -1693,3 +1693,7 @@ your_info.sent_messages_msg = You have sent {0} messages
your_info.sent_msgs = Sent messages
your_information = Your information
+
+
+search_for_this = Do you want to search for this:
+recommendation = Recommendation
diff --git a/src/main/resources/ontology/pkgOnto.ttl b/src/main/resources/ontology/pkgOnto.ttl
new file mode 100644
index 000000000..3295ffb51
--- /dev/null
+++ b/src/main/resources/ontology/pkgOnto.ttl
@@ -0,0 +1,399 @@
+@prefix : <https://github.com/tibonto/PKGonto/> .
+@prefix owl: <http://www.w3.org/2002/07/owl#> .
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix xml: <http://www.w3.org/XML/1998/namespace> .
+@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@base <https://github.com/tibonto/PKGonto/> .
+
+
+<https://github.com/tibonto/PKGonto> rdf:type owl:Ontology ;
+ owl:versionIRI ;
+ "Eleni Ilkou";
+ "Knowgraphs project" ;
+ "TIB – Leibniz Information Centre for Science and Technology, Hannover, Germany" ;
+ "CC-BY-SA 4.0" ;
+ "2021-04-19" ;
+ "en" ;
+ "PKG Ontology"@en ;
+ """PKG ontology describes a personalized knowledge graph for web search. The ontology is developed in OWL in Protege 5.0.0."""@en ;
+ "Web search engines thrive towards personalisation. At the same time, Personal Knowledge Graphs are gaining momentum as smaller graphs with user’s related data on top of larger Knowledge Graphs. The PKG ontology aims to model the constructive aspects of a personal knowledge graph in the backend of a web search application while addressing privacy and accessibility concerns and FAIR principles. This work presents an overview of the ontology description and discusses use cases."@en ;
+ owl:versionInfo "0.1.0 - first version"@en .
+
+# rdf:type owl:Ontology .
+
+#################################################################
+# Annotation properties
+#################################################################
+
+### http://www.w3.org/2004/02/skos/core#definition
+<http://www.w3.org/2004/02/skos/core#definition> rdf:type owl:AnnotationProperty .
+
+
+#################################################################
+# Datatypes
+#################################################################
+
+### http://www.w3.org/1999/02/22-rdf-syntax-ns#langString
+rdf:langString rdf:type rdfs:Datatype .
+
+
+#################################################################
+# Object Properties
+#################################################################
+
+### https://github.com/tibonto/PKGonto/contains
+<https://github.com/tibonto/PKGonto/contains> rdf:type owl:ObjectProperty ;
+ rdfs:subPropertyOf owl:topObjectProperty ;
+ rdfs:domain ;
+ rdfs:range ,
+ ;
+ rdfs:label "contains"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The content of the search session"@en .
+
+
+### https://github.com/tibonto/PKGonto/createsInputStream
+<https://github.com/tibonto/PKGonto/createsInputStream> rdf:type owl:ObjectProperty ;
+ rdfs:subPropertyOf owl:topObjectProperty ;
+ rdfs:domain ,
+ ,
+ ;
+ rdfs:range ;
+ rdfs:label "createsInputStream"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The different agents that are creating the input stream"@en .
+
+
+### https://github.com/tibonto/PKGonto/dependsOn
+<https://github.com/tibonto/PKGonto/dependsOn> rdf:type owl:ObjectProperty ;
+ rdfs:subPropertyOf owl:topObjectProperty ;
+ rdfs:domain ;
+ rdfs:range ,
+ ;
+ rdfs:label "dependsOn"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The parameters that the shared object depends on"@en .
+
+
+### https://github.com/tibonto/PKGonto/generatesResult
+<https://github.com/tibonto/PKGonto/generatesResult> rdf:type owl:ObjectProperty ;
+ rdfs:subPropertyOf owl:topObjectProperty ;
+ rdfs:domain ;
+ rdfs:range ;
+ rdfs:label "generatesResult"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The web results generated by the search query"@en .
+
+
+### https://github.com/tibonto/PKGonto/processes
+<https://github.com/tibonto/PKGonto/processes> rdf:type owl:ObjectProperty ;
+ rdfs:subPropertyOf owl:topObjectProperty ;
+ rdfs:domain ;
+ rdfs:range ;
+ rdfs:label "processes"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The input of the classifier, the item that is processing"@en .
+
+
+### https://github.com/tibonto/educor#accessibility
+<https://github.com/tibonto/educor#accessibility> rdf:type owl:ObjectProperty ;
+ rdfs:subPropertyOf owl:topObjectProperty ;
+ rdfs:domain ,
+ ,
+ ;
+ rdfs:range ;
+ rdfs:label "accessibility"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The accessibility property"@en .
+
+
+### https://github.com/tibonto/educor#generatesLogs
+<https://github.com/tibonto/educor#generatesLogs> rdf:type owl:ObjectProperty ;
+ rdfs:subPropertyOf owl:topObjectProperty ;
+ rdfs:domain ;
+ rdfs:range ;
+ rdfs:label "generatesLogs"@en ;
+ rdfs:seeAlso ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The activity that generates logs"@en .
+
+
+### https://github.com/tibonto/educor#hasProfile
+<https://github.com/tibonto/educor#hasProfile> rdf:type owl:ObjectProperty ;
+ rdfs:subPropertyOf owl:topObjectProperty ;
+ rdfs:domain ;
+ rdfs:range ;
+ rdfs:label "hasProfile"@en ;
+ rdfs:seeAlso ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The property that links to the profile of an agent"@en .
+
+
+### https://schema.org/memberOf
+<https://schema.org/memberOf> rdf:type owl:ObjectProperty ;
+ rdfs:subPropertyOf owl:topObjectProperty ;
+ rdfs:domain ;
+ rdfs:range ;
+ rdfs:label "memberOf"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "A group to which this Person belongs."@en .
+
+
+#################################################################
+# Data properties
+#################################################################
+
+### https://github.com/tibonto/PKGonto/groupType
+<https://github.com/tibonto/PKGonto/groupType> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:string ;
+ rdfs:label "groupType"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The type of a group (ex. private, public)."@en .
+
+### https://github.com/tibonto/PKGonto/surfaceForm
+<https://github.com/tibonto/PKGonto/surfaceForm> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:string ;
+ rdfs:label "surfaceForm"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The surface form of a word is the form of a word as it appears in the text. It represents the form in which the recognized entity was found."@en .
+
+### https://github.com/tibonto/PKGonto/similarityScore
+<https://github.com/tibonto/PKGonto/similarityScore> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:string ;
+ rdfs:label "similarityScore"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The confidence score of the classifier for the given recognized entity."@en .
+
+### https://github.com/tibonto/PKGonto/username
+<https://github.com/tibonto/PKGonto/username> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:string ;
+ rdfs:label "username"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The username"@en .
+
+
+### https://schema.org/author
+<https://schema.org/author> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ,
+ ,
+ ;
+ rdfs:range xsd:string ;
+ rdfs:label "author"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The author of this content."@en .
+
+### https://schema.org/text
+<https://schema.org/text> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:string ;
+ rdfs:label "text"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The textual content of this CreativeWork."@en .
+
+
+### https://schema.org/dateCreated
+<https://schema.org/dateCreated> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ,
+ ,
+ ,
+ ,
+ ,
+ ;
+ rdfs:range xsd:dateTimeStamp ;
+ rdfs:label "dateCreated"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The date on which the CreativeWork was created or the item was added to a DataFeed."@en .
+
+
+### https://schema.org/description
+<https://schema.org/description> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:string ;
+ rdfs:label "description"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "A description of the item."@en .
+
+
+### https://schema.org/email
+<https://schema.org/email> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range rdf:langString ;
+ rdfs:label "email"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "An email"@en .
+
+
+### https://schema.org/endTime
+<https://schema.org/endTime> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:dateTimeStamp ;
+ rdfs:label "endTime"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The endTime of the search session"@en .
+
+
+### https://schema.org/headline
+<https://schema.org/headline> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:string ;
+ rdfs:label "headline"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "Headline of the article."@en .
+
+
+### https://schema.org/identifier
+<https://schema.org/identifier> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:anyURI ;
+ rdfs:label "identifier"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The URI of the recognized entity"@en .
+
+
+### https://schema.org/keywords
+<https://schema.org/keywords> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:string ;
+ rdfs:label "keywords"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "Keywords or tags used to describe this content. Multiple entries in a keywords list are typically delimited by commas."@en .
+
+
+### https://schema.org/name
+<https://schema.org/name> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ,
+ ;
+ rdfs:range xsd:string ;
+ rdfs:label "name"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The name of an item"@en .
+
+
+### https://schema.org/startTime
+<https://schema.org/startTime> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:dateTimeStamp ;
+ rdfs:label "startTime"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The startTime of the search session"@en .
+
+
+### https://schema.org/text
+<https://schema.org/text> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ,
+ ,
+ ,
+ ;
+ rdfs:range rdf:langString ;
+ rdfs:label "text"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The textual content of this CreativeWork."@en .
+
+
+### https://schema.org/title
+<https://schema.org/title> rdf:type owl:DatatypeProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:string ;
+ rdfs:label "title"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The title of an item"@en .
+
+
+### https://schema.org/url
+<https://schema.org/url> rdf:type owl:DatatypeProperty ;
+ rdfs:subPropertyOf owl:topDataProperty ;
+ rdfs:domain ;
+ rdfs:range xsd:anyURI ;
+ rdfs:label "url"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "URL of the item."@en .
+
+
+#################################################################
+# Classes
+#################################################################
+
+### http://xmlns.com/foaf/spec/#term_Group
+<http://xmlns.com/foaf/spec/#term_Group> rdf:type owl:Class ;
+ rdfs:label "Group"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "A group a user is member of."@en .
+
+
+### https://github.com/tibonto/PKGonto/InputStream
+<https://github.com/tibonto/PKGonto/InputStream> rdf:type owl:Class ;
+ rdfs:label "InputStream"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The input stream to the classifier"@en .
+
+
+### https://github.com/tibonto/PKGonto/RecognizedEntities
+<https://github.com/tibonto/PKGonto/RecognizedEntities> rdf:type owl:Class ;
+ rdfs:label "RecognizedEntities"@en ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The recognized entities from the classifier"@en .
+
+
+### https://github.com/tibonto/PKGonto/SearchQuery
+<https://github.com/tibonto/PKGonto/SearchQuery> rdf:type owl:Class ;
+ rdfs:label "SearchQuery"@en ;
+ rdfs:seeAlso ;
+ <http://www.w3.org/2004/02/skos/core#definition> "The search query the user is formulating during the web search"@en .
+
+
+### https://github.com/tibonto/PKGonto/SearchResult
+<https://github.com/tibonto/PKGonto/SearchResult> rdf:type owl:Class ;
+ rdfs:label "SearchResult"@en ;
+ rdfs:seeAlso