
Commit 2a0b2945 authored by Daniel Gerhardt

Separate log entry persistence code and migrate it to Ektorp

parent 68f96741
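
The ad-hoc log(...) methods are removed from IDatabaseDao, CouchDBDao and StubDatabaseDao and replaced by a dedicated LogEntryRepository with an Ektorp-based implementation, CouchDbLogEntryRepository. CouchDBDao now delegates database logging to an injected repository (dbLogger), AppConfig exposes the repository as a Spring bean, and LogEntry gains a constructor, Jackson @JsonView annotations and the Entity interface for the new persistence path.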
@@ -23,9 +23,12 @@ import com.fasterxml.jackson.databind.SerializationFeature;
import de.thm.arsnova.ImageUtils;
import de.thm.arsnova.connector.client.ConnectorClient;
import de.thm.arsnova.connector.client.ConnectorClientImpl;
import de.thm.arsnova.entities.LogEntry;
import de.thm.arsnova.entities.serialization.CouchDbDocumentModule;
import de.thm.arsnova.entities.serialization.CouchDbObjectMapperFactory;
import de.thm.arsnova.entities.serialization.View;
import de.thm.arsnova.persistance.LogEntryRepository;
import de.thm.arsnova.persistance.couchdb.CouchDbLogEntryRepository;
import de.thm.arsnova.persistance.couchdb.InitializingCouchDbConnector;
import de.thm.arsnova.socket.ARSnovaSocket;
import de.thm.arsnova.socket.ARSnovaSocketIOServer;
@@ -265,6 +268,11 @@ public class AppConfig extends WebMvcConfigurerAdapter {
return factory;
}
@Bean
public LogEntryRepository logEntryRepository() throws Exception {
return new CouchDbLogEntryRepository(LogEntry.class, couchDbConnector(), false);
}
@Bean(name = "connectorClient")
public ConnectorClient connectorClient() {
if (!connectorEnable) {
......
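
The new logEntryRepository() bean above exposes the Ektorp-backed repository to the Spring context. Below is a minimal consumer sketch, assuming field injection as used by CouchDBDao further down in this diff; the wrapper class itself is purely illustrative and not part of the commit.

```java
import de.thm.arsnova.persistance.LogEntryRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class LoggingConsumerSketch {
	@Autowired
	private LogEntryRepository dbLogger;

	public void recordSessionDeletion(final String sessionId) {
		// Alternating key/value pairs become the payload map; INFO is the default level.
		dbLogger.log("delete", "type", "session", "id", sessionId);
	}
}
```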
@@ -32,6 +32,7 @@ import de.thm.arsnova.entities.transport.ImportExportSession;
import de.thm.arsnova.entities.transport.ImportExportSession.ImportExportQuestion;
import de.thm.arsnova.events.NewAnswerEvent;
import de.thm.arsnova.exceptions.NotFoundException;
import de.thm.arsnova.persistance.LogEntryRepository;
import de.thm.arsnova.services.ISessionService;
import net.sf.ezmorph.Morpher;
import net.sf.ezmorph.MorpherRegistry;
@@ -89,6 +90,9 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
@Autowired
private ISessionService sessionService;
@Autowired
private LogEntryRepository dbLogger;
private String databaseHost;
private int databasePort;
private String databaseName;
@@ -136,43 +140,6 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
this.publisher = publisher;
}
@Override
public void log(String event, Map<String, Object> payload, LogEntry.LogLevel level) {
final Document d = new Document();
d.put("timestamp", System.currentTimeMillis());
d.put("type", "log");
d.put("event", event);
d.put("level", level.ordinal());
d.put("payload", payload);
try {
database.saveDocument(d);
} catch (final IOException e) {
logger.error("Logging of '{}' event to database failed.", event, e);
}
}
@Override
public void log(String event, Map<String, Object> payload) {
log(event, payload, LogEntry.LogLevel.INFO);
}
@Override
public void log(String event, LogEntry.LogLevel level, Object... rawPayload) {
if (rawPayload.length % 2 != 0) {
throw new IllegalArgumentException("");
}
Map<String, Object> payload = new HashMap<>();
for (int i = 0; i < rawPayload.length; i += 2) {
payload.put((String) rawPayload[i], rawPayload[i + 1]);
}
log(event, payload, level);
}
@Override
public void log(String event, Object... rawPayload) {
log(event, LogEntry.LogLevel.INFO, rawPayload);
}
@Override
public List<Session> getMySessions(final User user, final int start, final int limit) {
return this.getDatabaseDao().getSessionsForUsername(user.getUsername(), start, limit);
@@ -861,7 +828,7 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
try {
int count = deleteAnswers(question);
deleteDocument(question.get_id());
log("delete", "type", "question", "answerCount", count);
dbLogger.log("delete", "type", "question", "answerCount", count);
return count;
} catch (final IOException e) {
@@ -897,8 +864,8 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
}
int[] count = deleteAllAnswersWithQuestions(questions);
log("delete", "type", "question", "questionCount", count[0]);
log("delete", "type", "answer", "answerCount", count[1]);
dbLogger.log("delete", "type", "question", "questionCount", count[0]);
dbLogger.log("delete", "type", "answer", "answerCount", count[1]);
return count;
}
@@ -932,7 +899,7 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
logger.error("Could not bulk delete answers.");
}
}
log("delete", "type", "answer", "answerCount", count);
dbLogger.log("delete", "type", "answer", "answerCount", count);
return count;
} catch (final IOException e) {
@@ -1621,7 +1588,7 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
public void deleteAnswer(final String answerId) {
try {
database.deleteDocument(database.getDocument(answerId));
log("delete", "type", "answer");
dbLogger.log("delete", "type", "answer");
} catch (final IOException e) {
logger.error("Could not delete answer {}.", answerId, e);
}
@@ -1631,7 +1598,7 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
public void deleteInterposedQuestion(final InterposedQuestion question) {
try {
deleteDocument(question.get_id());
log("delete", "type", "comment");
dbLogger.log("delete", "type", "comment");
} catch (final IOException e) {
logger.error("Could not delete interposed question {}.", question.get_id(), e);
}
@@ -1736,7 +1703,7 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
count = deleteAllQuestionsWithAnswers(session);
deleteDocument(session.get_id());
logger.debug("Deleted session document {} and related data.", session.get_id());
log("delete", "type", "session", "id", session.get_id());
dbLogger.log("delete", "type", "session", "id", session.get_id());
} catch (final IOException e) {
logger.error("Could not delete session {}.", session, e);
}
@@ -1762,7 +1729,7 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
if (!results.isEmpty()) {
logger.info("Deleted {} inactive guest sessions.", results.size());
log("cleanup", "type", "session", "sessionCount", results.size(), "questionCount", count[1], "answerCount", count[2]);
dbLogger.log("cleanup", "type", "session", "sessionCount", results.size(), "questionCount", count[1], "answerCount", count[2]);
}
count[0] = results.size();
@@ -1788,7 +1755,7 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
newDocs.add(newDoc);
logger.debug("Marked logged_in document {} for deletion.", oldDoc.getId());
/* Use log type 'user' since effectively the user is deleted in case of guests */
log("delete", "type", "user", "id", oldDoc.getId());
dbLogger.log("delete", "type", "user", "id", oldDoc.getId());
}
if (!newDocs.isEmpty()) {
@@ -1802,7 +1769,7 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
if (count > 0) {
logger.info("Deleted {} visited session lists of inactive users.", count);
log("cleanup", "type", "visitedsessions", "count", count);
dbLogger.log("cleanup", "type", "visitedsessions", "count", count);
}
return count;
@@ -2104,7 +2071,7 @@ public class CouchDBDao implements IDatabaseDao, ApplicationEventPublisherAware
}
/* This does account for failed deletions */
log("delete", "type", "comment", "commentCount", results.size());
dbLogger.log("delete", "type", "comment", "commentCount", results.size());
return results.size();
}
......
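
The hunks above replace every call to the removed DAO-local log(...) helper with a call on the injected dbLogger. A hedged before/after sketch of one such call site follows; identifiers are taken from this diff, the wrapper class is illustrative only.

```java
import de.thm.arsnova.persistance.LogEntryRepository;

class CallSiteSketch {
	private final LogEntryRepository dbLogger;

	CallSiteSketch(final LogEntryRepository dbLogger) {
		this.dbLogger = dbLogger;
	}

	void afterAnswerDeletion(final int count) {
		// before: log("delete", "type", "answer", "answerCount", count);  // DAO-local helper, removed above
		// after: delegate to the injected LogEntryRepository
		dbLogger.log("delete", "type", "answer", "answerCount", count);
	}
}
```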
@@ -30,46 +30,6 @@ import java.util.Map;
* All methods the database must support.
*/
public interface IDatabaseDao {
/**
* Logs an event to the database. Arbitrary data can be attached as payload. Database logging should only be used
* if the logged data is later analyzed by the backend itself. Otherwise use the default logging mechanisms.
*
* @param event type of the event
* @param payload arbitrary logging data
* @param level severity of the event
*/
void log(String event, Map<String, Object> payload, LogEntry.LogLevel level);
/**
* Logs an event of informational severity to the database. Arbitrary data can be attached as payload. Database
* logging should only be used if the logged data is later analyzed by the backend itself. Otherwise use the default
* logging mechanisms.
*
* @param event type of the event
* @param payload arbitrary logging data
*/
void log(String event, Map<String, Object> payload);
/**
* Logs an event to the database. Arbitrary data can be attached as payload. Database logging should only be used
* if the logged data is later analyzed by the backend itself. Otherwise use the default logging mechanisms.
*
* @param event type of the event
* @param level severity of the event
* @param rawPayload key/value pairs of arbitrary logging data
*/
void log(String event, LogEntry.LogLevel level, Object... rawPayload);
/**
* Logs an event of informational severity to the database. Arbitrary data can be attached as payload. Database
* logging should only be used if the logged data is later analyzed by the backend itself. Otherwise use the default
* logging mechanisms.
*
* @param event type of the event
* @param rawPayload key/value pairs of arbitrary logging data
*/
void log(String event, Object... rawPayload);
Session getSessionFromKeyword(String keyword);
List<Session> getMySessions(User user, final int start, final int limit);
......
@@ -17,9 +17,13 @@
*/
package de.thm.arsnova.entities;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonView;
import de.thm.arsnova.entities.serialization.View;
import java.util.Map;
public class LogEntry {
public class LogEntry implements Entity {
public enum LogLevel {
TRACE,
DEBUG,
@@ -31,69 +35,78 @@ public class LogEntry {
private String id;
private String rev;
private long timestamp;
private long timestamp = System.currentTimeMillis();
private String event;
private int level;
private Map<String, Object> payload;
public String getId() {
return id;
public LogEntry(@JsonProperty String event, @JsonProperty int level, @JsonProperty Map<String, Object> payload) {
this.event = event;
this.level = level;
this.payload = payload;
}
public void setId(String id) {
this.id = id;
@JsonView(View.Persistence.class)
public String getId() {
return id;
}
/* CouchDB deserialization */
public void set_id(String id) {
@JsonView(View.Persistence.class)
public void setId(final String id) {
this.id = id;
}
public String getRev() {
return rev;
}
public void setRev(String rev) {
@JsonView(View.Persistence.class)
public void setRevision(final String rev) {
this.rev = rev;
}
/* CouchDB deserialization */
public void set_rev(String rev) {
this.rev = rev;
@JsonView(View.Persistence.class)
public String getRevision() {
return rev;
}
@JsonView(View.Persistence.class)
public long getTimestamp() {
return timestamp;
}
@JsonView(View.Persistence.class)
public void setTimestamp(long timestamp) {
this.timestamp = timestamp;
}
@JsonView(View.Persistence.class)
public String getEvent() {
return event;
}
@JsonView(View.Persistence.class)
public void setEvent(String event) {
this.event = event;
}
@JsonView(View.Persistence.class)
public int getLevel() {
return level;
}
@JsonView(View.Persistence.class)
public void setLevel(int level) {
this.level = level;
}
@JsonView(View.Persistence.class)
public void setLevel(LogLevel level) {
this.level = level.ordinal();
}
@JsonView(View.Persistence.class)
public Map<String, Object> getPayload() {
return payload;
}
@JsonView(View.Persistence.class)
public void setPayload(Map<String, Object> payload) {
this.payload = payload;
}
......
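
LogEntry now implements Entity, defaults its timestamp to the construction time and carries @JsonView(View.Persistence.class) annotations for CouchDB serialization. A minimal construction sketch, mirroring how CouchDbLogEntryRepository.create() below builds an entry; the helper class is illustrative only.

```java
import java.util.HashMap;
import java.util.Map;

import de.thm.arsnova.entities.LogEntry;

class LogEntrySketch {
	static LogEntry deletionEntry(final int answerCount) {
		final Map<String, Object> payload = new HashMap<>();
		payload.put("type", "answer");
		payload.put("answerCount", answerCount);
		// The level is persisted as an ordinal; the timestamp defaults to the construction time.
		return new LogEntry("delete", LogEntry.LogLevel.INFO.ordinal(), payload);
	}
}
```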
@@ -21,6 +21,7 @@ import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.fasterxml.jackson.databind.util.Converter;
import de.thm.arsnova.entities.Entity;
import de.thm.arsnova.entities.LogEntry;
import java.util.HashMap;
import java.util.Map;
@@ -28,6 +29,7 @@ public class CouchDbTypeFieldConverter implements Converter<Class<? extends Enti
private static final Map<Class<? extends Entity>, String> typeMapping = new HashMap<>();
{
typeMapping.put(LogEntry.class, "log");
}
@Override
......
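
The converter registers LogEntry under the type string "log", the same discriminator the removed CouchDBDao code set by hand (d.put("type", "log")). An isolated sketch of the class-to-type lookup follows; the standalone map only illustrates the mapping, while the real converter is wired into the project's CouchDB Jackson configuration.

```java
import java.util.HashMap;
import java.util.Map;

import de.thm.arsnova.entities.Entity;
import de.thm.arsnova.entities.LogEntry;

class TypeMappingSketch {
	private static final Map<Class<? extends Entity>, String> TYPE_MAPPING = new HashMap<>();

	static {
		// Same discriminator the removed CouchDBDao logging code wrote manually.
		TYPE_MAPPING.put(LogEntry.class, "log");
	}

	static String typeOf(final Class<? extends Entity> clazz) {
		return TYPE_MAPPING.get(clazz);
	}
}
```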
/*
* This file is part of ARSnova Backend.
* Copyright (C) 2012-2017 The ARSnova Team
*
* ARSnova Backend is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ARSnova Backend is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package de.thm.arsnova.persistance;
import de.thm.arsnova.entities.LogEntry;
import java.util.HashMap;
import java.util.Map;
public interface LogEntryRepository {
/**
* Logs an event to the database. Arbitrary data can be attached as payload. Database logging should only be used
* if the logged data is later analyzed by the backend itself. Otherwise use the default logging mechanisms.
*
* @param event type of the event
* @param level severity of the event
* @param payload arbitrary logging data
*/
void create(String event, LogEntry.LogLevel level, Map<String, Object> payload);
/**
* Logs an event to the database. Arbitrary data can be attached as payload. Database logging should only be used
* if the logged data is later analyzed by the backend itself. Otherwise use the default logging mechanisms.
*
* @param event type of the event
* @param payload arbitrary logging data
* @param level severity of the event
*/
default void log(String event, Map<String, Object> payload, LogEntry.LogLevel level) {
create(event, level, payload);
}
/**
* Logs an event of informational severity to the database. Arbitrary data can be attached as payload. Database
* logging should only be used if the logged data is later analyzed by the backend itself. Otherwise use the default
* logging mechanisms.
*
* @param event type of the event
* @param payload arbitrary logging data
*/
default void log(String event, Map<String, Object> payload) {
create(event, LogEntry.LogLevel.INFO, payload);
}
/**
* Logs an event to the database. Arbitrary data can be attached as payload. Database logging should only be used
* if the logged data is later analyzed by the backend itself. Otherwise use the default logging mechanisms.
*
* @param event type of the event
* @param level severity of the event
* @param rawPayload key/value pairs of arbitrary logging data
*/
default void log(String event, LogEntry.LogLevel level, Object... rawPayload) {
if (rawPayload.length % 2 != 0) {
throw new IllegalArgumentException("");
}
Map<String, Object> payload = new HashMap<>();
for (int i = 0; i < rawPayload.length; i += 2) {
payload.put((String) rawPayload[i], rawPayload[i + 1]);
}
create(event, level, payload);
}
/**
* Logs an event of informational severity to the database. Arbitrary data can be attached as payload. Database
* logging should only be used if the logged data is later analyzed by the backend itself. Otherwise use the default
* logging mechanisms.
*
* @param event type of the event
* @param rawPayload key/value pairs of arbitrary logging data
*/
default void log(String event, Object... rawPayload) {
log(event, LogEntry.LogLevel.INFO, rawPayload);
}
}
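
A usage sketch for the default convenience methods above; the repository instance is assumed to be provided by the Spring context (see the AppConfig bean), and LogLevel.DEBUG is one of the enum constants shown earlier in this diff.

```java
import de.thm.arsnova.entities.LogEntry;
import de.thm.arsnova.persistance.LogEntryRepository;

class LogEntryRepositoryUsageSketch {
	void examples(final LogEntryRepository repository) {
		// Varargs form, default INFO level: alternating key/value pairs become the payload map.
		repository.log("cleanup", "type", "visitedsessions", "count", 42);

		// Explicit severity level.
		repository.log("delete", LogEntry.LogLevel.DEBUG, "type", "session", "id", "8a1b2c3d");

		// An odd number of key/value arguments throws an IllegalArgumentException.
	}
}
```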
/*
* This file is part of ARSnova Backend.
* Copyright (C) 2012-2017 The ARSnova Team
*
* ARSnova Backend is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ARSnova Backend is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package de.thm.arsnova.persistance.couchdb;
import de.thm.arsnova.entities.LogEntry;
import de.thm.arsnova.persistance.LogEntryRepository;
import org.ektorp.CouchDbConnector;
import org.ektorp.support.CouchDbRepositorySupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
public class CouchDbLogEntryRepository extends CouchDbRepositorySupport<LogEntry> implements LogEntryRepository {
private static final Logger logger = LoggerFactory.getLogger(CouchDbLogEntryRepository.class);
public CouchDbLogEntryRepository(Class<LogEntry> type, CouchDbConnector db, boolean createIfNotExists) {
super(type, db, createIfNotExists);
}
@Override
public void create(String event, LogEntry.LogLevel level, Map<String, Object> payload) {
LogEntry log = new LogEntry(event, level.ordinal(), payload);
try {
db.create(log);
} catch (final IllegalArgumentException e) {
logger.error("Logging of '{}' event to database failed.", event, e);
}
}
}
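
A standalone construction sketch for the repository, assuming the stock Ektorp classes StdHttpClient, StdCouchDbInstance and StdCouchDbConnector and a database named "arsnova"; none of these specifics come from this diff, and in the application the connector is provided by AppConfig instead.

```java
import java.util.Collections;

import de.thm.arsnova.entities.LogEntry;
import de.thm.arsnova.persistance.couchdb.CouchDbLogEntryRepository;
import org.ektorp.CouchDbConnector;
import org.ektorp.CouchDbInstance;
import org.ektorp.http.HttpClient;
import org.ektorp.http.StdHttpClient;
import org.ektorp.impl.StdCouchDbConnector;
import org.ektorp.impl.StdCouchDbInstance;

class CouchDbLogEntryRepositorySketch {
	public static void main(final String[] args) throws Exception {
		// Assumed local CouchDB instance and database name; adjust to the actual deployment.
		final HttpClient httpClient = new StdHttpClient.Builder().url("http://localhost:5984").build();
		final CouchDbInstance instance = new StdCouchDbInstance(httpClient);
		final CouchDbConnector connector = new StdCouchDbConnector("arsnova", instance);

		final CouchDbLogEntryRepository repository =
				new CouchDbLogEntryRepository(LogEntry.class, connector, false);
		repository.create("cleanup", LogEntry.LogLevel.INFO,
				Collections.<String, Object>singletonMap("count", 1));
	}
}
```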
@@ -110,26 +110,6 @@ public class StubDatabaseDao implements IDatabaseDao {
return (stubSessions.get(keyword) == null);
}
@Override
public void log(String event, Map<String, Object> payload, LogEntry.LogLevel level) {
// TODO Auto-generated method stub
}
@Override
public void log(String event, Map<String, Object> payload) {
// TODO Auto-generated method stub
}
@Override
public void log(String event, LogEntry.LogLevel level, Object... rawPayload) {
// TODO Auto-generated method stub
}
@Override
public void log(String event, Object... rawPayload) {
// TODO Auto-generated method stub
}
@Override
public Session getSessionFromKeyword(String keyword) {
return stubSessions.get(keyword);
......