diff --git a/build-ami.py b/build-ami.py index 7b20b94..af7f2b0 100755 --- a/build-ami.py +++ b/build-ami.py @@ -18,7 +18,7 @@ env.ec2_amis = ['ami-fb68f8cb'] # Ubuntu 12.04 LTS amd64 EBS env.ec2_keypair = 'MinecraftEC2' env.ec2_secgroups = ['minecraft'] -env.ec2_instancetype = 'm1.small' +env.ec2_instancetype = 'm3.medium' env.ec2_userdata = open('cloud-config').read() diff --git a/modules/msm/manifests/init.pp b/modules/msm/manifests/init.pp index 9660d21..c092a44 100644 --- a/modules/msm/manifests/init.pp +++ b/modules/msm/manifests/init.pp @@ -15,7 +15,7 @@ require => Group['minecraft'], } - $repo_url = 'git://github.com/toffer/minecraft-server-manager.git' + $repo_url = 'git://github.com/marcuswhybrow/minecraft-server-manager.git' $repo_dir = '/home/minecraft/msm' exec {'git_clone_msm': user => 'minecraft', @@ -124,6 +124,16 @@ require => Exec['msm_server_create'], } + # PRONE TO BREAKING IF SERVER CHANGES + # Changes eula to true to accept (required to run server) + file {'/opt/msm/servers/default/eula.txt': + owner => 'minecraft', + group => 'minecraft', + mode => 0664, + content => template('msm/eula.txt'), + require => Exec['msm_server_create'], + } + exec {'msm_jar': user => root, command => "/etc/init.d/msm default jar minecraft", @@ -174,6 +184,7 @@ hasstatus => false, status => "ps -ef | grep -v grep | grep -i 'screen.*msm.*java.*Xms.*jar'", require => [File['/opt/msm/servers/default/server.properties', + '/opt/msm/servers/default/eula.txt', '/opt/msm/servers/default/world', '/opt/msm/servers/default/active'], Exec['update-rc.d msm defaults 97 03']], diff --git a/modules/msm/templates/eula.txt b/modules/msm/templates/eula.txt new file mode 100644 index 0000000..ac74b96 --- /dev/null +++ b/modules/msm/templates/eula.txt @@ -0,0 +1 @@ +eula=true \ No newline at end of file diff --git a/modules/msm_backup_restore/files/bin/msm-pre-shutdown-backup.sh b/modules/msm_backup_restore/files/bin/msm-pre-shutdown-backup.sh index 80043cb..cb8c2cf 100755 --- 
a/modules/msm_backup_restore/files/bin/msm-pre-shutdown-backup.sh +++ b/modules/msm_backup_restore/files/bin/msm-pre-shutdown-backup.sh @@ -11,9 +11,6 @@ VENV_PYTHON='/usr/local/venv/bin/python' # Shutdown msm $MSM stop -# Logroll -$MSM all logroll -find /opt/msm/servers -name 'server.log.offset' -exec rm '{}' ';' # Backup working files BACKUP_WORKING_FILES=/usr/local/bin/msm-backup-working-files-to-s3.py diff --git a/modules/msm_backup_restore/files/bin/msm-track-active-players.py b/modules/msm_backup_restore/files/bin/msm-track-active-players.py index 8158011..7021940 100755 --- a/modules/msm_backup_restore/files/bin/msm-track-active-players.py +++ b/modules/msm_backup_restore/files/bin/msm-track-active-players.py @@ -1,12 +1,15 @@ #!/usr/bin/env python import os +import sys import re import requests -import time +import syslog +import hashlib +import gzip +from operator import itemgetter from datetime import datetime -from pygtail import Pygtail from sqlalchemy import create_engine from sqlalchemy import exc from sqlalchemy.ext.declarative import declarative_base @@ -16,6 +19,7 @@ # Convert from Heroku style DATABASE_URL to Sqlalchemy style, if necessary db_url = os.environ.get('DATABASE_URL') DATABASE_URL = re.sub('^postgres:', 'postgresql:', db_url) +OFFSET_FILE_NAME = '.last-log-offset' engine = create_engine(DATABASE_URL, poolclass=NullPool) Base = declarative_base(engine) @@ -87,26 +91,201 @@ def logout(session, player_name, logout_time): update({'logout': logout_dt}) session.commit() +def get_matching_sha1(fileobj, num_bytes, target_sha1): + ''' + checks to see if a partial log file matches the sha1 hash + of the last read file + + returns a hashlib object of the matching sha1 hash. 
None if no match + ''' + + try: + fileobj.seek(0) + trunc_file_data = fileobj.read(num_bytes) + + if len(trunc_file_data) == num_bytes: + sha1 = hashlib.sha1() + sha1.update(trunc_file_data) + if sha1.hexdigest() == target_sha1: + return sha1 + except IOError as ioe: + syslog.syslog(syslog.LOG_WARNING, 'Cant read logfile %s: %s' % + (fileobj.name, ioe.message)) + return None + +def _parse_log_dir(logdir): + ''' + parses the passed in log directory for log file names. + returns a list of tuples of 3 items: (datetime, num, filename) + + datetime object corresponds to the date of the log file. Note + that 'lastest.log' will have datetime set to datettime.today() + and num of maxint + + num corresponds to the -# that follows the log's YYYY-MM-DD. + + the filename is the actual filename of the log (basename only) + ''' + + logname_re = re.compile(r'^(?P\d{4}-\d{2}-\d{2}|latest)' + r'-?(?P\d+|\b).log(.gz|\b)$') + for log_name in os.listdir(logdir): + match = logname_re.match(log_name) + if match: + date = match.group('date') + num = match.group('num') + if date == 'latest': + date = datetime.today() + num = sys.maxint + else: + date = datetime.strptime(date, '%Y-%m-%d') + num = int(num) + yield (date, num, log_name) + +def get_sorted_log_list(logdir): + ''' + sorts the list from _parse_log_dir by the datetime and num + of the log file in order of most recent date -> earliest. + + returns a list of logfile paths and the log date in string format + ''' + return [(os.path.join(logdir, p[2]), p[0].strftime('%Y-%m-%d')) + for p in sorted(_parse_log_dir(logdir), + key=itemgetter(0,1), + reverse=True)] + +def iterate_log_and_save(logdate, logfile, offset=0, sha1=None): + ''' + iterates through the lines in a logfile (starting from whereever + the filepointer is) and will subsequently save the length off + the file and it's sha1 hash into a hidden file for later reference. 
# Mirrors the module-level constant defined next to the imports.
OFFSET_FILE_NAME = '.last-log-offset'

def iterate_log_and_save(logdate, logfile, offset=0, sha1=None):
    '''
    Generator over the remaining lines of an open log file.

    Each yielded line is prefixed with ``[logdate]`` so downstream
    parsing has a date to pair with the in-line timestamp.  When the
    file is exhausted, the total byte offset and the SHA1 of everything
    read are persisted via record_last_offset() for the next run.

    ``offset`` / ``sha1`` may be passed when the file pointer does not
    start at the beginning (resuming a partially read log).
    '''
    if not sha1:
        sha1 = hashlib.sha1()
    for line in logfile:
        # hashlib needs bytes; the caller may hand us a text- or
        # binary-mode file object, so encode only when necessary.
        data = line if isinstance(line, bytes) else line.encode('utf-8')
        offset += len(data)
        sha1.update(data)
        yield '[%s]%s' % (logdate, line)
    record_last_offset(logfile, offset, sha1)

def record_last_offset(logfile, offset, sha1):
    '''
    Persist ``offset`` (bytes read) and the hex digest of ``sha1`` in a
    hidden file in the same directory as ``logfile`` so the next run
    can resume where this one stopped.
    '''
    logdir = os.path.dirname(logfile.name)
    offset_file_path = os.path.join(logdir, OFFSET_FILE_NAME)
    # The original re-opened the file a second time *inside* the
    # ``with`` block, leaking an unclosed handle and discarding the
    # managed one; the single managed handle is all that is needed.
    with open(offset_file_path, 'w') as offset_file:
        offset_file.write('%d\n%s\n' % (offset, sha1.hexdigest()))

def get_last_offset(logdir):
    '''
    Read back the (offset, sha1 hexdigest) pair recorded by
    record_last_offset() for ``logdir``.

    Returns (-1, None) when the offset file is missing, unreadable or
    malformed, signalling the caller to start from scratch.
    '''
    offset_file_path = os.path.join(logdir, OFFSET_FILE_NAME)
    if not os.path.isfile(offset_file_path):
        return (-1, None)
    try:
        with open(offset_file_path, 'r') as offset_file:
            of_lines = offset_file.readlines()
    except IOError as ioe:
        # (was ``ioe.message``, which does not exist on Python 3)
        syslog.syslog(syslog.LOG_WARNING,
                      'Could not open the last offset file %s : %s' %
                      (offset_file_path, ioe))
        return (-1, None)
    if len(of_lines) < 2:
        # Corrupt / truncated offset file: the original raised
        # IndexError here.
        return (-1, None)
    return (int(of_lines[0].strip(), 10), of_lines[1].strip())
+ ''' + + offset, last_sha1_digest = get_last_offset(logdir) + latest_path = os.path.join(logdir, 'latest.log') + todaysdate = datetime.today().strftime('%Y-%m-%d') + + #no existing data, just iterate through the latest.log + if offset < 0: + with open(latest_path, 'rb') as latest: + for line in iterate_log_and_save(todaysdate, latest): + yield line + else: + log_list = get_sorted_log_list(logdir) + log_process_sequence = [log_list[0]] + sha1_obj = None + for i, log_data in enumerate(log_list): + logpath, logdate = log_data + open_op = gzip.open if 'log.gz' in logpath\ + else open + with open_op(logpath, 'rb') as logfile: + sha1_obj = get_matching_sha1(logfile, offset, last_sha1_digest) + if sha1_obj: + log_process_sequence = log_list[:i] + log_process_sequence.reverse() + for line in iterate_log_and_save(logdate, logfile, offset, sha1_obj): + yield line + break + + if log_process_sequence: + last_log_id = len(log_process_sequence) - 1 + for i, log_data in enumerate(log_process_sequence): + logpath, logdate = log_data + open_op = gzip.open if 'log.gz' in logpath\ + else open + with open_op(logpath, 'rb') as logfile: + if i == last_log_id: + for line in iterate_log_and_save(logdate, logfile): + yield line + else: + for line in logfile: + yield '[%s]%s' % (logdate, line) def main(): session = loadSession() # FIXME: Don't hardcode server name. Handle multiple worlds? - LOG = '/opt/msm/servers/default/server.log' - unread = Pygtail(LOG) - + LOG_DIR = '/opt/msm/servers/default/logs' # Regexes - login_regex = re.compile('(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) \[INFO\] (.+)\[/([0-9.]+):\d+\] logged in') - logout_regex = re.compile('(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) \[INFO\] (.+) lost connection: disconnect') + date_pat = r'\[(\d{4}-\d{2}-\d{2})\]' # normally, there is no date (just time). 
But this script will append + # a date to each line for time keeping purposes while parsing + time_pat = r'\[(\d{2}:\d{2}:\d{2})\]' + timestamp = date_pat + time_pat + pri = r'\[Server thread/INFO\]:' + fmt_str = '%s %s %s' + login_regex = re.compile(fmt_str % (timestamp, pri, r'(.+)\[/([0-9.]+):\d+\] logged in')) + logout_regex = re.compile(fmt_str % (timestamp, pri, r'(.+) left the game')) - for line in unread: + for line in iterate_unread_lines(LOG_DIR): login_match = login_regex.search(line) logout_match = logout_regex.search(line) if login_match: - (date, player_name, ip) = login_match.groups() - login(session, player_name, date) + (date, time, player_name, ip) = login_match.groups() + login(session, player_name, '%s %s' % (date, time)) elif logout_match: - (date, player_name) = logout_match.groups() - logout(session, player_name, date) + (date, time, player_name) = logout_match.groups() + logout(session, player_name, '%s %s' % (date, time)) if __name__ == "__main__": diff --git a/modules/msm_backup_restore/files/init.d/msm-log-rotate b/modules/msm_backup_restore/files/init.d/msm-log-rotate deleted file mode 100644 index 0f8d5e9..0000000 --- a/modules/msm_backup_restore/files/init.d/msm-log-rotate +++ /dev/null @@ -1,81 +0,0 @@ -#! /bin/sh -### BEGIN INIT INFO -# Provides: msm-log-rotate -# Required-Start: $remote_fs $syslog $network $time -# Required-Stop: $remote_fs $syslog $network $time -# Default-Start: 2 3 4 5 -# Default-Stop: 0 1 6 -# X-Start-Before: msm -# X-Stop-After: msm -# Short-Description: Gzip log file and move it to log archives dir. -# Description: Gzip log file and move it to log archives dir. -### END INIT INFO - -# Author: Tom Offermann -# - -# Do NOT "set -e" - - -# PATH should only include /usr/* if it runs after the mountnfs.sh script -PATH=/sbin:/usr/sbin:/bin:/usr/bin:/usr/local/bin -DESC="Gzip log file and move it to log archives dir." 
-NAME=msm-log-rotate -SCRIPTNAME=/etc/init.d/$NAME - -# Load the VERBOSE setting and other rcS variables -. /lib/init/vars.sh - -# Define LSB log_* functions. -# Depend on lsb-base (>= 3.2-14) to ensure that this file is present -# and status_of_proc is working. -. /lib/lsb/init-functions - -# -# Function that starts the daemon/service -# -do_start() -{ - # Nothing to do on start. - return 1 -} - -# -# Function that stops the daemon/service -# -do_stop() -{ - # Roll log file - MSM='/usr/local/bin/msm' - if [ -x $MSM ]; then - $MSM all logroll - find /opt/msm/servers -name 'server.log.offset' -exec rm '{}' ';' - else - return 2 - fi -} - -case "$1" in - start) - [ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME" - do_start - case "$?" in - 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; - 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; - esac - ;; - stop) - [ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME" - do_stop - case "$?" in - 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; - 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; - esac - ;; - *) - echo "Usage: $SCRIPTNAME {start|stop}" >&2 - exit 3 - ;; -esac - -: diff --git a/modules/msm_backup_restore/manifests/init.pp b/modules/msm_backup_restore/manifests/init.pp index b341e45..cdbfe45 100644 --- a/modules/msm_backup_restore/manifests/init.pp +++ b/modules/msm_backup_restore/manifests/init.pp @@ -29,13 +29,6 @@ source => 'puppet:///modules/msm_backup_restore/init.d/msm-jar-update', } - file {'/etc/init.d/msm-log-rotate': - owner => root, - group => root, - mode => 0755, - source => 'puppet:///modules/msm_backup_restore/init.d/msm-log-rotate', - } - file {'/etc/init.d/msm-update-auth-lists': owner => root, group => root, @@ -77,18 +70,6 @@ require => Exec['update-rc.d -f msm-jar-update remove'], } - exec {'update-rc.d -f msm-log-rotate remove': - user => root, - path => '/usr/sbin', - require => File['/etc/init.d/msm-log-rotate'], - } - - exec {'update-rc.d msm-log-rotate defaults 96 04': - user => 
root, - path => '/usr/sbin', - require => Exec['update-rc.d -f msm-log-rotate remove'], - } - exec {'update-rc.d -f msm-update-auth-lists remove': user => root, path => '/usr/sbin', diff --git a/modules/oracle_java/manifests/init.pp b/modules/oracle_java/manifests/init.pp index aa47517..cede10d 100644 --- a/modules/oracle_java/manifests/init.pp +++ b/modules/oracle_java/manifests/init.pp @@ -1,28 +1,19 @@ class oracle_java { - user {'oab': - ensure => present, - home => '/home/oab', - managehome => true, + exec{'add_java_repo': + path => '/usr/bin', + command => 'add-apt-repository -y ppa:webupd8team/java && echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | sudo /usr/bin/debconf-set-selections && sudo apt-get update -y', } - exec {'git_clone_oab': - user => oab, - path => '/usr/bin', - #command => 'git clone git://github.com/flexiondotorg/oab-java6.git /home/oab/oab-java', - command => 'git clone https://github.com/ladios/oab-java6.git /home/oab/oab-java', - creates => '/home/oab/oab-java', - require => User['oab'], + exec{'build_java': + user => root, + path => [ "/bin/", "/sbin/" , "/usr/bin/", "/usr/sbin/", "/usr/local/sbin/" ], + command => 'apt-get install -y --force-yes oracle-java8-installer', + require => Exec['add_java_repo'], + logoutput => on_failure } - - exec {'build_java': - user => root, - timeout => 0, - command => '/home/oab/oab-java/oab-java.sh -7s', - require => Exec['git_clone_oab'], - } - - package{ 'oracle-java7-jre': + + package{ 'oracle-java8-jre': ensure => present, require => Exec['build_java'], }