Whoa stuff

Colin McDonald 2016-05-05 17:38:28 -04:00
parent 53c7222107
commit 1b05b16fed
11 changed files with 37 additions and 28 deletions

View File

@@ -1,5 +1,6 @@
general.id=main
general.releaseStage=production
logging.level=info
mongo.address=ds055505.mongolab.com
mongo.port=55505
mongo.database=minehqapi
@@ -15,6 +16,6 @@ twillio.authToken=982592505a171d3be6b0722f5ecacc0e
mandrill.apiKey=0OYtwymqJP6oqvszeJu0vQ
librato.email=cmcdonald.main@gmail.com
librato.apiKey=a818c3eca8a59d6d9cf76dc9f0d237c6aa97f257c482ce3363cf55a5431bc153
bugsnag.apiKey=
bugsnag.apiKey=0e47fba8b825416b7cbc839066184509
auth.permittedUserRanks=developer,owner
auth.websiteApiKey=RVbp4hY6sCFVaf

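These keys are consumed at startup through config.getProperty(...) in APIv3 below, and general.releaseStage plus bugsnag.apiKey feed directly into setupBugsnag() later in this commit. A minimal sketch of loading such a file with java.util.Properties, assuming a hypothetical file name apiv3.properties (the real file name and path are not visible in this diff):

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Properties;

final class ConfigSketch {

    static Properties load() throws Exception {
        Properties config = new Properties();
        // "apiv3.properties" is a placeholder name; the actual properties file is not shown here.
        try (InputStream in = Files.newInputStream(Paths.get("apiv3.properties"))) {
            config.load(in);
        }
        return config;
    }

}
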
pom.xml
View File

@@ -17,6 +17,17 @@
<encoding>UTF-8</encoding>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<archive>
<manifest>
<mainClass>net.frozenorb.apiv3.Main</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>

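The new manifest entry points Main-Class at net.frozenorb.apiv3.Main, so the jar built by maven-jar-plugin (and, by default, the shaded jar produced by the maven-shade-plugin declared just below it) becomes runnable with java -jar. The Main class itself is not part of this diff; a hypothetical sketch of what such an entry point might look like, assuming it only bootstraps APIv3:

package net.frozenorb.apiv3;

public final class Main {

    // Hypothetical entry point; the real net.frozenorb.apiv3.Main is not shown in this commit.
    public static void main(String[] args) {
        new APIv3();
    }

}
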
View File

@@ -68,9 +68,9 @@ public final class APIv3 {
.create();
APIv3() {
//System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "trace");
setupConfig();
System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", config.getProperty("logging.level"));
setupDatabase();
setupRedis();
setupMetrics();
@@ -89,13 +89,15 @@ public final class APIv3 {
private void setupDatabase() {
MongoClient mongoClient = new MongoClient(new ServerAddress(
config.getProperty("mongo.address"),
Integer.parseInt(config.getProperty("mongo.port"))),
Integer.parseInt(config.getProperty("mongo.port")))/*,
ImmutableList.of(
MongoCredential.createCredential(
config.getProperty("mongo.username"),
config.getProperty("mongo.database"),
config.getProperty("mongo.password").toCharArray())
));
)*/);
// TODO: DISABLE CREDS IF NOT NEEDED
MorphiaLoggerFactory.reset();
MorphiaLoggerFactory.registerLogger(SLF4JLoggerImplFactory.class);
@@ -136,6 +138,9 @@ public final class APIv3 {
private void setupBugsnag() {
bugsnagClient = new Client(config.getProperty("bugsnag.apiKey"));
bugsnagClient.setReleaseStage(config.getProperty("general.releaseStage"));
bugsnagClient.setProjectPackages("net.frozenorb.apiv3");
// TODO: Use .setLogger to use slf4j with this
}
private void setupHttp() {

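The MongoDB credential block is commented out rather than made conditional, per the TODO above. One way to honor that TODO would be to pass credentials only when a username is actually configured; the following is a sketch under the assumption that an optional mongo.username/mongo.password pair is simply left unset when auth is off (the property names appear in the commented-out code, but the toggle itself is hypothetical):

import com.google.common.collect.ImmutableList;
import com.mongodb.MongoClient;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
import java.util.Properties;

final class MongoSetupSketch {

    static MongoClient connect(Properties config) {
        ServerAddress address = new ServerAddress(
                config.getProperty("mongo.address"),
                Integer.parseInt(config.getProperty("mongo.port")));
        String username = config.getProperty("mongo.username");

        if (username == null || username.isEmpty()) {
            // No credentials configured: connect anonymously, as the commented-out code now does.
            return new MongoClient(address);
        }

        return new MongoClient(address, ImmutableList.of(
                MongoCredential.createCredential(
                        username,
                        config.getProperty("mongo.database"),
                        config.getProperty("mongo.password").toCharArray())));
    }

}
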
View File

@@ -1,6 +1,5 @@
package net.frozenorb.apiv3.filters;
import com.sun.xml.internal.messaging.saaj.util.Base64;
import net.frozenorb.apiv3.APIv3;
import net.frozenorb.apiv3.actors.*;
import net.frozenorb.apiv3.models.Server;
@@ -11,6 +10,8 @@ import spark.Request;
import spark.Response;
import spark.Spark;
import java.util.Base64;
public final class ActorAttributeFilter implements Filter {
public void handle(Request req, Response res) {
@@ -29,7 +30,7 @@ public final class ActorAttributeFilter implements Filter {
@SuppressWarnings("deprecation") // We purposely get the User by their last username.
private Actor processBasicAuthorization(String authHeader, Response res) {
String encodedHeader = authHeader.substring("Basic ".length());
String[] credentials = Base64.base64Decode(encodedHeader).split(":");
String[] credentials = new String(Base64.getDecoder().decode(encodedHeader.getBytes())).split(":");
if (credentials.length == 2) {
User user = User.byLastUsername(credentials[0]);

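The filter now uses the JDK's public java.util.Base64 instead of com.sun.xml.internal.messaging.saaj.util.Base64, which is an internal class and not guaranteed to exist at runtime. A standalone sketch of the same Basic-auth decoding (not the project's code), with an explicit charset, which the one-liner above leaves implicit:

import java.nio.charset.StandardCharsets;
import java.util.Base64;

final class BasicAuthSketch {

    // Returns {username, password} for a header like "Basic dXNlcjpwYXNz".
    static String[] decode(String authHeader) {
        String encoded = authHeader.substring("Basic ".length());
        byte[] raw = Base64.getDecoder().decode(encoded);
        // Limit of 2 keeps passwords that themselves contain ':' intact.
        return new String(raw, StandardCharsets.UTF_8).split(":", 2);
    }

}
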
View File

@@ -15,11 +15,8 @@ public final class MetricsAfterFilter implements Filter {
public void handle(Request req, Response res) {
responseLengthMetric.update(req.contentLength());
Timer.Context timerMetricActorType = req.attribute("timerMetric.actorType");
Timer.Context timerMetricGlobal = req.attribute("timerMetric.global");
timerMetricActorType.stop();
timerMetricGlobal.stop();
Timer.Context timerMetric = req.attribute("timerMetric");
timerMetric.stop();
}
}

View File

@@ -3,22 +3,16 @@ package net.frozenorb.apiv3.filters;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import net.frozenorb.apiv3.APIv3;
import net.frozenorb.apiv3.actors.Actor;
import spark.Filter;
import spark.Request;
import spark.Response;
public final class MetricsBeforeFilter implements Filter {
private Timer responseTimesGlobalMetric = APIv3.getMetrics().timer(MetricRegistry.name(getClass(), "responseTimes", "global"));
private Timer responseTimesMetric = APIv3.getMetrics().timer(MetricRegistry.name("apiv3", "http", "responseTimes"));
public void handle(Request req, Response res) {
Actor actor = req.attribute("actor");
String metricName = MetricRegistry.name(getClass(), "responseTimes", actor.getType().name());
Timer responseTimesActorTypeMetric = APIv3.getMetrics().timer(metricName);
req.attribute("timerMetric.global", responseTimesGlobalMetric.time());
req.attribute("timerMetric.actorType", responseTimesActorTypeMetric.time());
req.attribute("timerMetric", responseTimesMetric.time());
}
}

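Both filters now share a single request attribute, timerMetric, instead of separate global and per-actor-type timers. A condensed sketch of the pattern (not the project's exact classes): the before-filter starts a Dropwizard Timer.Context and stashes it on the request, and the after-filter pulls it back out and stops it.

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import spark.Request;
import spark.Response;
import spark.Spark;

final class TimerFilterSketch {

    static void install(MetricRegistry metrics) {
        Timer responseTimes = metrics.timer(MetricRegistry.name("apiv3", "http", "responseTimes"));

        Spark.before((Request req, Response res) -> {
            // Start timing and remember the context for this request.
            req.attribute("timerMetric", responseTimes.time());
        });

        Spark.after((Request req, Response res) -> {
            Timer.Context timerMetric = req.attribute("timerMetric");
            if (timerMetric != null) { // Guard in case the before-filter never ran for this request.
                timerMetric.stop();
            }
        });
    }

}
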
View File

@@ -18,7 +18,7 @@ public final class ServerGroup {
// We define these HashSets up here because, in the event they're
// empty, Morphia will load them as null, not empty sets.
@Getter @Setter @ExcludeFromReplies private Set<String> announcements = new HashSet<>();
@Getter private Map<String, List<String>> permissions = new HashMap<>();
@Getter @ExcludeFromReplies private Map<String, List<String>> permissions = new HashMap<>();
public static ServerGroup byId(String id) {
return APIv3.getDatastore().createQuery(ServerGroup.class).field("id").equal(id).get();

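The permissions map is now excluded from API replies, like announcements above it. @ExcludeFromReplies is a project annotation whose wiring is not shown in this diff; one common way to implement this kind of field filtering with the Gson instance the API already uses is a serialization ExclusionStrategy, sketched below under that assumption (the annotation declared here is only a stand-in):

import com.google.gson.ExclusionStrategy;
import com.google.gson.FieldAttributes;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

// Stand-in for the project's annotation; its real definition is not part of this commit.
@Retention(RetentionPolicy.RUNTIME)
@interface ExcludeFromReplies {}

final class ExcludeFromRepliesSketch {

    static Gson buildGson() {
        return new GsonBuilder()
                .addSerializationExclusionStrategy(new ExclusionStrategy() {

                    public boolean shouldSkipField(FieldAttributes field) {
                        // Skip any field carrying @ExcludeFromReplies when serializing replies.
                        return field.getAnnotation(ExcludeFromReplies.class) != null;
                    }

                    public boolean shouldSkipClass(Class<?> clazz) {
                        return false;
                    }

                })
                .create();
    }

}
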
View File

@@ -30,7 +30,7 @@ public final class GETDump implements Route {
});
return effectedUsers;
case "accessDeniable":
case "accessdeniable": // Lowercase d because we convert to lowercase above
// We have to name it effectedUsers2 because Java's
// scoping in switch statements is really dumb.
List<UUID> effectedUsers2 = new ArrayList<>();

View File

@@ -1,5 +1,6 @@
package net.frozenorb.apiv3.routes;
import lombok.extern.slf4j.Slf4j;
import net.frozenorb.apiv3.APIv3;
import net.frozenorb.apiv3.utils.ErrorUtils;
import spark.Request;
@@ -7,9 +8,11 @@ import spark.Response;
import spark.Route;
import spark.Spark;
@Slf4j
public final class NotFound implements Route {
public Object handle(Request req, Response res) {
log.info(req.requestMethod().toUpperCase() + " " + req.url());
Spark.halt(404, APIv3.getGson().toJson(ErrorUtils.notFound("Route", req.url())));
return null;
}

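@Slf4j is Lombok's SLF4J logger annotation; for reference, it generates roughly the field shown below on the annotated class, so the log.info(...) call above goes through the same slf4j-simple configuration set up in APIv3. This sketch is illustrative only and is not part of the project's code:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class Slf4jExpansionSketch {

    // The field Lombok's @Slf4j generates on the annotated class (NotFound in the real code).
    private static final Logger log = LoggerFactory.getLogger(Slf4jExpansionSketch.class);

    static void example(String method, String url) {
        log.info(method.toUpperCase() + " " + url); // mirrors the route's log line
    }

}
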
View File

@@ -13,7 +13,7 @@ public final class POSTServer implements Route {
public Object handle(Request req, Response res) {
String id = req.queryParams("id");
String bungeeId = req.queryParams("id");
String bungeeId = req.queryParams("bungeeId");
String displayName = req.queryParams("displayName");
String apiKey = req.queryParams("apiKey");
ServerGroup group = ServerGroup.byId(req.queryParams("group"));

View File

@@ -13,11 +13,8 @@ import spark.Response;
public final class LoggingExceptionHandler implements ExceptionHandler {
public void handle(Exception ex, Request req, Response res) {
Timer.Context timerMetricActorType = req.attribute("timerMetric.actorType");
Timer.Context timerMetricGlobal = req.attribute("timerMetric.global");
timerMetricActorType.stop();
timerMetricGlobal.stop();
Timer.Context timerMetric = req.attribute("timerMetric");
timerMetric.stop();
String code = new ObjectId().toHexString();
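
Like the after-filter, the exception handler now stops the single timerMetric context. If an exception can be raised before MetricsBeforeFilter has run (for example inside an earlier filter), that attribute will be null and the stop() call would throw; a defensive variant of the same stop, sketched here as an assumption rather than the project's actual behavior:

import com.codahale.metrics.Timer;
import spark.ExceptionHandler;
import spark.Request;
import spark.Response;

final class NullSafeTimerStopSketch implements ExceptionHandler {

    public void handle(Exception ex, Request req, Response res) {
        Timer.Context timerMetric = req.attribute("timerMetric");
        if (timerMetric != null) { // Before-filter may not have run yet when the exception was thrown.
            timerMetric.stop();
        }
        // Error id generation and logging (as in LoggingExceptionHandler) omitted from this sketch.
    }

}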