This allows us to prevent users from hammering the API every few
seconds to check if any of their packages were updated. Real-world
users check as often as every 5 or 10 seconds.

Signed-off-by: Florian Pritz <bluewind@xinu.at>
---
Basic idea for a rate limiting solution. Currently the cleanup of old
entries is done on each API request. That may be a bit excessive, but I
didn't find a cron job to put it in, and since the table is indexed,
the query is probably fast enough (a sketch of a standalone cleanup
script follows below, after the patch). At least for a PoC this should
be fine. Tell me what you think.

 conf/config.proto         |  4 +++
 upgrading/4.7.0.txt       | 11 ++++++++
 web/lib/aurjson.class.php | 71 +++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 86 insertions(+)
 create mode 100644 upgrading/4.7.0.txt

diff --git a/conf/config.proto b/conf/config.proto
index 1750929..934d369 100644
--- a/conf/config.proto
+++ b/conf/config.proto
@@ -36,6 +36,10 @@ enable-maintenance = 1
 maintenance-exceptions = 127.0.0.1
 render-comment-cmd = /usr/local/bin/aurweb-rendercomment
 
+[ratelimit]
+request_limit = 4000
+window_length = 86400
+
 [notifications]
 notify-cmd = /usr/local/bin/aurweb-notify
 sendmail = /usr/bin/sendmail
diff --git a/upgrading/4.7.0.txt b/upgrading/4.7.0.txt
new file mode 100644
index 0000000..1b51cd2
--- /dev/null
+++ b/upgrading/4.7.0.txt
@@ -0,0 +1,11 @@
+1. Add ApiRateLimit table:
+
+---
+CREATE TABLE `ApiRateLimit` (
+	`ip` varchar(45) CHARACTER SET ascii COLLATE ascii_bin NOT NULL,
+	`requests` int(11) NOT NULL,
+	`window_start` bigint(20) NOT NULL,
+	PRIMARY KEY (`ip`),
+	KEY `window_start_idx` (`window_start`)
+) ENGINE=InnoDB;
+---
diff --git a/web/lib/aurjson.class.php b/web/lib/aurjson.class.php
index 9eeaafd..8a6dd7b 100644
--- a/web/lib/aurjson.class.php
+++ b/web/lib/aurjson.class.php
@@ -96,6 +96,11 @@ public function handle($http_data) {
 
 		$this->dbh = DB::connect();
 
+		if ($this->check_ratelimit($_SERVER['REMOTE_ADDR'])) {
+			header("HTTP/1.1 429 Too Many Requests");
+			return $this->json_error('Rate limit reached');
+		}
+
 		$type = str_replace('-', '_', $http_data['type']);
 		if ($type == 'info' && $this->version >= 5) {
 			$type = 'multiinfo';
@@ -130,6 +135,72 @@ public function handle($http_data) {
 		}
 	}
 
+	/*
+	 * Check if an IP needs to be rate limited.
+	 *
+	 * @param $ip IP of the current request
+	 *
+	 * @return true if IP needs to be rate limited, false otherwise.
+	 */
+	private function check_ratelimit($ip) {
+		$limit = config_get("ratelimit", "request_limit");
+		$window_length = config_get("ratelimit", "window_length");
+		$this->update_ratelimit($ip);
+		$stmt = $this->dbh->prepare("
+			SELECT requests,window_start FROM ApiRateLimit
+			WHERE ip = :ip");
+		$stmt->bindParam(":ip", $ip);
+		$result = $stmt->execute();
+
+		if (!$result) {
+			return false;
+		}
+
+		$row = $stmt->fetch(PDO::FETCH_ASSOC);
+
+		if ($row['window_start'] < time() - $window_length) {
+			$stmt = $this->dbh->prepare("
+				DELETE FROM ApiRateLimit
+				WHERE ip = :ip");
+			$stmt->bindParam(":ip", $ip);
+			$stmt->execute();
+			return false;
+		}
+
+		if ($row['requests'] > $limit) {
+			return true;
+		}
+		return false;
+	}
+
+	/*
+	 * Update the rate limit for an IP by increasing its requests value by one.
+	 *
+	 * @param $ip IP of the current request
+	 *
+	 * @return void
+	 */
+	private function update_ratelimit($ip) {
+		$window_length = config_get("ratelimit", "window_length");
+		$time = time();
+		$deletion_time = $time - $window_length;
+		$stmt = $this->dbh->prepare("
+			INSERT INTO ApiRateLimit
+			(ip, requests, window_start)
+			VALUES (:ip, 0, :window_start)
+			ON DUPLICATE KEY UPDATE requests=requests+1");
+		$stmt->bindParam(":ip", $ip);
+		$stmt->bindParam(":window_start", $time);
+		$stmt->execute();
+
+		// TODO: this should move into some cronjob
+		$stmt = $this->dbh->prepare("
+			DELETE FROM ApiRateLimit
+			WHERE window_start < :time");
+		$stmt->bindParam(":time", $deletion_time);
+		$stmt->execute();
+	}
+
 	/*
 	 * Returns a JSON formatted error string.
 	 *
-- 
2.15.1
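
As the TODO in update_ratelimit() notes, the per-request DELETE is only
a stopgap and the cleanup belongs in a cron job. A minimal sketch of
what such a standalone script could look like; the DSN, credentials and
hard-coded window length are placeholders, and a real version would
reuse aurweb's config_get() and DB::connect() instead:

#!/usr/bin/php
<?php
/* Hypothetical cron-driven cleanup for the ApiRateLimit table.
 * The connection details below are placeholders, not part of the patch. */
$window_length = 86400; /* keep in sync with [ratelimit] window_length */
$deletion_time = time() - $window_length;

$dbh = new PDO("mysql:host=localhost;dbname=AUR", "aur", "aur");
$dbh->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);

/* Drop every entry whose window ended before the cutoff. */
$stmt = $dbh->prepare("
	DELETE FROM ApiRateLimit
	WHERE window_start < :time");
$stmt->bindParam(":time", $deletion_time, PDO::PARAM_INT);
$stmt->execute();

Run hourly or even daily (the window_start index keeps the query
cheap), this would let the DELETE in update_ratelimit() be dropped
entirely.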
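
For reference, this is roughly what a client hitting the limit would
see. The endpoint URL and query below are illustrative only; the 429
status line and the "Rate limit reached" message are the ones sent by
the patch:

<?php
/* Illustrative RPC client check; not part of the patch. */
$url = "https://aur.archlinux.org/rpc/?v=5&type=info&arg[]=foo";

$body = @file_get_contents($url);
/* The HTTP stream wrapper fills $http_response_header even when the
 * server answers with an error status and $body is false. */
$status = isset($http_response_header[0]) ? $http_response_header[0] : "";

if (strpos($status, "429") !== false) {
	/* Request budget (request_limit per window_length seconds, per IP)
	 * is exhausted; back off instead of retrying immediately. */
	echo "Rate limited, try again later\n";
} elseif ($body !== false) {
	var_dump(json_decode($body, true));
}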