[aur-dev] [PATCH 0/8] Introduce a test suite for the Git/SSH interface
This larger patch set adds a test suite for the Git/SSH interface. It also
includes some very basic tests of git-auth, git-serve and git-update.

The series consists of three major parts: Patches 1, 4, 5, 6 and 7 prepare
the code and the database schema so that SQLite can alternatively be used
as the database backend. Patches 2 and 3 add support for overriding the
path to the configuration file using an environment variable. The last
patch adds the test suite itself and some basic tests; since it uses the
new SQLite backend, no MySQL server is required to run them.

The whole topic will be merged to pu and sit there for a while.

Comments welcome!

Lukas Fleischer (8):
  git-interface: Add database abstraction layer
  git-interface: Factor out configuration file parsing
  git-interface: Add AUR_CONFIG environment variable
  git-interface: Do not use rowcount
  git-interface: Do not use UNIX_TIMESTAMP
  git-interface: Support SQLite as database backend
  Cleanup database schema
  git-interface: Add test suite and basic tests

 conf/config.proto                  |   2 +-
 git-interface/config.py            |  31 ++
 git-interface/db.py                |  51 +++
 git-interface/git-auth.py          |  33 +-
 git-interface/git-serve.py         |  96 ++---
 git-interface/git-update.py        | 162 ++++---
 git-interface/test/Makefile        |  11 +
 git-interface/test/setup.sh        | 121 ++++++
 git-interface/test/sharness.sh     | 851 +++++++++++++++++++++++++++++++++++++
 git-interface/test/t0001-auth.sh   |  19 +
 git-interface/test/t0002-serve.sh  |  26 ++
 git-interface/test/t0003-update.sh |  20 +
 schema/aur-schema.sql              |  67 ++-
 13 files changed, 1288 insertions(+), 202 deletions(-)
 create mode 100644 git-interface/config.py
 create mode 100644 git-interface/db.py
 create mode 100644 git-interface/test/Makefile
 create mode 100644 git-interface/test/setup.sh
 create mode 100644 git-interface/test/sharness.sh
 create mode 100755 git-interface/test/t0001-auth.sh
 create mode 100755 git-interface/test/t0002-serve.sh
 create mode 100755 git-interface/test/t0003-update.sh

-- 
2.9.2
Add a new class that connects to the database specified in the configuration file and provides an interface to execute SQL queries. Prepared statements with qmark ("?") placeholders are supported. Replace all direct database accesses with calls to the new abstraction layer. Signed-off-by: Lukas Fleischer <lfleischer@archlinux.org> --- git-interface/db.py | 38 ++++++++++++ git-interface/git-auth.py | 22 +++---- git-interface/git-serve.py | 92 ++++++++++++---------------- git-interface/git-update.py | 144 +++++++++++++++++++++----------------------- 4 files changed, 150 insertions(+), 146 deletions(-) create mode 100644 git-interface/db.py diff --git a/git-interface/db.py b/git-interface/db.py new file mode 100644 index 0000000..d3e1e69 --- /dev/null +++ b/git-interface/db.py @@ -0,0 +1,38 @@ +import configparser +import mysql.connector +import os + + +class Connection: + _conn = None + + def __init__(self): + config = configparser.RawConfigParser() + config.read(os.path.dirname(os.path.realpath(__file__)) + "/../conf/config") + + aur_db_host = config.get('database', 'host') + aur_db_name = config.get('database', 'name') + aur_db_user = config.get('database', 'user') + aur_db_pass = config.get('database', 'password') + aur_db_socket = config.get('database', 'socket') + + self._conn = mysql.connector.connect(host=aur_db_host, + user=aur_db_user, + passwd=aur_db_pass, + db=aur_db_name, + unix_socket=aur_db_socket, + buffered=True) + + def execute(self, query, params=()): + query = query.replace('%', '%%').replace('?', '%s') + + cur = self._conn.cursor() + cur.execute(query, params) + + return cur + + def commit(self): + self._conn.commit() + + def close(self): + self._conn.close() diff --git a/git-interface/git-auth.py b/git-interface/git-auth.py index 83bd20c..7cd033c 100755 --- a/git-interface/git-auth.py +++ b/git-interface/git-auth.py @@ -1,12 +1,13 @@ #!/usr/bin/python3 import configparser -import mysql.connector import shlex import os import re import sys +import db + def format_command(env_vars, command, ssh_opts, ssh_key): environment = '' @@ -26,12 +27,6 @@ def format_command(env_vars, command, ssh_opts, ssh_key): config = configparser.RawConfigParser() config.read(os.path.dirname(os.path.realpath(__file__)) + "/../conf/config") -aur_db_host = config.get('database', 'host') -aur_db_name = config.get('database', 'name') -aur_db_user = config.get('database', 'user') -aur_db_pass = config.get('database', 'password') -aur_db_socket = config.get('database', 'socket') - valid_keytypes = config.get('auth', 'valid-keytypes').split() username_regex = config.get('auth', 'username-regex') git_serve_cmd = config.get('auth', 'git-serve-cmd') @@ -42,15 +37,12 @@ keytext = sys.argv[2] if keytype not in valid_keytypes: exit(1) -db = mysql.connector.connect(host=aur_db_host, user=aur_db_user, - passwd=aur_db_pass, db=aur_db_name, - unix_socket=aur_db_socket, buffered=True) +conn = db.Connection() -cur = db.cursor() -cur.execute("SELECT Users.Username, Users.AccountTypeID FROM Users " + - "INNER JOIN SSHPubKeys ON SSHPubKeys.UserID = Users.ID " - "WHERE SSHPubKeys.PubKey = %s AND Users.Suspended = 0", - (keytype + " " + keytext,)) +cur = conn.execute("SELECT Users.Username, Users.AccountTypeID FROM Users " + + "INNER JOIN SSHPubKeys ON SSHPubKeys.UserID = Users.ID " + "WHERE SSHPubKeys.PubKey = ? 
AND Users.Suspended = 0", + (keytype + " " + keytext,)) if cur.rowcount != 1: exit(1) diff --git a/git-interface/git-serve.py b/git-interface/git-serve.py index e0ebd0e..ab612f0 100755 --- a/git-interface/git-serve.py +++ b/git-interface/git-serve.py @@ -1,21 +1,16 @@ #!/usr/bin/python3 import configparser -import mysql.connector import os import re import shlex import sys +import db + config = configparser.RawConfigParser() config.read(os.path.dirname(os.path.realpath(__file__)) + "/../conf/config") -aur_db_host = config.get('database', 'host') -aur_db_name = config.get('database', 'name') -aur_db_user = config.get('database', 'user') -aur_db_pass = config.get('database', 'password') -aur_db_socket = config.get('database', 'socket') - repo_path = config.get('serve', 'repo-path') repo_regex = config.get('serve', 'repo-regex') git_shell_cmd = config.get('serve', 'git-shell-cmd') @@ -27,12 +22,8 @@ maintenance_exc = config.get('options', 'maintenance-exceptions').split() def pkgbase_from_name(pkgbase): - db = mysql.connector.connect(host=aur_db_host, user=aur_db_user, - passwd=aur_db_pass, db=aur_db_name, - unix_socket=aur_db_socket) - cur = db.cursor() - cur.execute("SELECT ID FROM PackageBases WHERE Name = %s", [pkgbase]) - db.close() + conn = db.Connection() + cur = conn.execute("SELECT ID FROM PackageBases WHERE Name = ?", [pkgbase]) row = cur.fetchone() return row[0] if row else None @@ -43,21 +34,18 @@ def pkgbase_exists(pkgbase): def list_repos(user): - db = mysql.connector.connect(host=aur_db_host, user=aur_db_user, - passwd=aur_db_pass, db=aur_db_name, - unix_socket=aur_db_socket) - cur = db.cursor() + conn = db.Connection() - cur.execute("SELECT ID FROM Users WHERE Username = %s ", [user]) + cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user]) userid = cur.fetchone()[0] if userid == 0: die('{:s}: unknown user: {:s}'.format(action, user)) - cur.execute("SELECT Name, PackagerUID FROM PackageBases " + - "WHERE MaintainerUID = %s ", [userid]) + cur = conn.execute("SELECT Name, PackagerUID FROM PackageBases " + + "WHERE MaintainerUID = ?", [userid]) for row in cur: print((' ' if row[1] else '*') + row[0]) - db.close() + conn.close() def create_pkgbase(pkgbase, user): @@ -66,26 +54,25 @@ def create_pkgbase(pkgbase, user): if pkgbase_exists(pkgbase): die('{:s}: package base already exists: {:s}'.format(action, pkgbase)) - db = mysql.connector.connect(host=aur_db_host, user=aur_db_user, - passwd=aur_db_pass, db=aur_db_name, - unix_socket=aur_db_socket) - cur = db.cursor() + conn = db.Connection() - cur.execute("SELECT ID FROM Users WHERE Username = %s ", [user]) + cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user]) userid = cur.fetchone()[0] if userid == 0: die('{:s}: unknown user: {:s}'.format(action, user)) - cur.execute("INSERT INTO PackageBases (Name, SubmittedTS, ModifiedTS, " + - "SubmitterUID, MaintainerUID) VALUES (%s, UNIX_TIMESTAMP(), " + - "UNIX_TIMESTAMP(), %s, %s)", [pkgbase, userid, userid]) + cur = conn.execute("INSERT INTO PackageBases (Name, SubmittedTS, " + + "ModifiedTS, SubmitterUID, MaintainerUID) VALUES " + + "(?, UNIX_TIMESTAMP(), UNIX_TIMESTAMP(), ?, ?)", + [pkgbase, userid, userid]) pkgbase_id = cur.lastrowid - cur.execute("INSERT INTO PackageNotifications (PackageBaseID, UserID) " + - "VALUES (%s, %s)", [pkgbase_id, userid]) + cur = conn.execute("INSERT INTO PackageNotifications " + + "(PackageBaseID, UserID) VALUES (?, ?)", + [pkgbase_id, userid]) - db.commit() - db.close() + conn.commit() + conn.close() def 
pkgbase_set_keywords(pkgbase, keywords): @@ -93,34 +80,29 @@ def pkgbase_set_keywords(pkgbase, keywords): if not pkgbase_id: die('{:s}: package base not found: {:s}'.format(action, pkgbase)) - db = mysql.connector.connect(host=aur_db_host, user=aur_db_user, - passwd=aur_db_pass, db=aur_db_name, - unix_socket=aur_db_socket) - cur = db.cursor() + conn = db.Connection() - cur.execute("DELETE FROM PackageKeywords WHERE PackageBaseID = %s", - [pkgbase_id]) + conn.execute("DELETE FROM PackageKeywords WHERE PackageBaseID = ?", + [pkgbase_id]) for keyword in keywords: - cur.execute("INSERT INTO PackageKeywords (PackageBaseID, Keyword) " - "VALUES (%s, %s)", [pkgbase_id, keyword]) + conn.execute("INSERT INTO PackageKeywords (PackageBaseID, Keyword) " + + "VALUES (?, ?)", [pkgbase_id, keyword]) - db.commit() - db.close() + conn.commit() + conn.close() def pkgbase_has_write_access(pkgbase, user): - db = mysql.connector.connect(host=aur_db_host, user=aur_db_user, - passwd=aur_db_pass, db=aur_db_name, - unix_socket=aur_db_socket, buffered=True) - cur = db.cursor() - - cur.execute("SELECT COUNT(*) FROM PackageBases " + - "LEFT JOIN PackageComaintainers " + - "ON PackageComaintainers.PackageBaseID = PackageBases.ID " + - "INNER JOIN Users ON Users.ID = PackageBases.MaintainerUID " + - "OR PackageBases.MaintainerUID IS NULL " + - "OR Users.ID = PackageComaintainers.UsersID " + - "WHERE Name = %s AND Username = %s", [pkgbase, user]) + conn = db.Connection() + + cur = conn.execute("SELECT COUNT(*) FROM PackageBases " + + "LEFT JOIN PackageComaintainers " + + "ON PackageComaintainers.PackageBaseID = PackageBases.ID " + + "INNER JOIN Users " + + "ON Users.ID = PackageBases.MaintainerUID " + + "OR PackageBases.MaintainerUID IS NULL " + + "OR Users.ID = PackageComaintainers.UsersID " + + "WHERE Name = ? AND Username = ?", [pkgbase, user]) return cur.fetchone()[0] > 0 diff --git a/git-interface/git-update.py b/git-interface/git-update.py index 7aace9b..b7199e6 100755 --- a/git-interface/git-update.py +++ b/git-interface/git-update.py @@ -1,7 +1,6 @@ #!/usr/bin/python3 import configparser -import mysql.connector import os import pygit2 import re @@ -11,15 +10,11 @@ import sys import srcinfo.parse import srcinfo.utils +import db + config = configparser.RawConfigParser() config.read(os.path.dirname(os.path.realpath(__file__)) + "/../conf/config") -aur_db_host = config.get('database', 'host') -aur_db_name = config.get('database', 'name') -aur_db_user = config.get('database', 'user') -aur_db_pass = config.get('database', 'password') -aur_db_socket = config.get('database', 'socket') - notify_cmd = config.get('notifications', 'notify-cmd') repo_path = config.get('serve', 'repo-path') @@ -65,27 +60,27 @@ def parse_dep(depstring): return (depname, depcond) -def save_metadata(metadata, db, cur, user): +def save_metadata(metadata, conn, user): # Obtain package base ID and previous maintainer. pkgbase = metadata['pkgbase'] - cur.execute("SELECT ID, MaintainerUID FROM PackageBases " - "WHERE Name = %s", [pkgbase]) + cur = conn.execute("SELECT ID, MaintainerUID FROM PackageBases " + "WHERE Name = ?", [pkgbase]) (pkgbase_id, maintainer_uid) = cur.fetchone() was_orphan = not maintainer_uid # Obtain the user ID of the new maintainer. - cur.execute("SELECT ID FROM Users WHERE Username = %s", [user]) + cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user]) user_id = int(cur.fetchone()[0]) # Update package base details and delete current packages. 
- cur.execute("UPDATE PackageBases SET ModifiedTS = UNIX_TIMESTAMP(), " + - "PackagerUID = %s, OutOfDateTS = NULL WHERE ID = %s", - [user_id, pkgbase_id]) - cur.execute("UPDATE PackageBases SET MaintainerUID = %s " + - "WHERE ID = %s AND MaintainerUID IS NULL", - [user_id, pkgbase_id]) - cur.execute("DELETE FROM Packages WHERE PackageBaseID = %s", - [pkgbase_id]) + conn.execute("UPDATE PackageBases SET ModifiedTS = UNIX_TIMESTAMP(), " + + "PackagerUID = ?, OutOfDateTS = NULL WHERE ID = ?", + [user_id, pkgbase_id]) + conn.execute("UPDATE PackageBases SET MaintainerUID = ? " + + "WHERE ID = ? AND MaintainerUID IS NULL", + [user_id, pkgbase_id]) + conn.execute("DELETE FROM Packages WHERE PackageBaseID = ?", + [pkgbase_id]) for pkgname in srcinfo.utils.get_package_names(metadata): pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata) @@ -102,94 +97,94 @@ def save_metadata(metadata, db, cur, user): pkginfo[field] = None # Create a new package. - cur.execute("INSERT INTO Packages (PackageBaseID, Name, " + - "Version, Description, URL) " + - "VALUES (%s, %s, %s, %s, %s)", - [pkgbase_id, pkginfo['pkgname'], ver, - pkginfo['pkgdesc'], pkginfo['url']]) - db.commit() + cur = conn.execute("INSERT INTO Packages (PackageBaseID, Name, " + + "Version, Description, URL) " + + "VALUES (?, ?, ?, ?, ?)", + [pkgbase_id, pkginfo['pkgname'], ver, + pkginfo['pkgdesc'], pkginfo['url']]) + conn.commit() pkgid = cur.lastrowid # Add package sources. for source_info in extract_arch_fields(pkginfo, 'source'): - cur.execute("INSERT INTO PackageSources (PackageID, Source, " + - "SourceArch) VALUES (%s, %s, %s)", - [pkgid, source_info['value'], source_info['arch']]) + conn.execute("INSERT INTO PackageSources (PackageID, Source, " + + "SourceArch) VALUES (?, ?, ?)", + [pkgid, source_info['value'], source_info['arch']]) # Add package dependencies. for deptype in ('depends', 'makedepends', 'checkdepends', 'optdepends'): - cur.execute("SELECT ID FROM DependencyTypes WHERE Name = %s", - [deptype]) + cur = conn.execute("SELECT ID FROM DependencyTypes WHERE Name = ?", + [deptype]) deptypeid = cur.fetchone()[0] for dep_info in extract_arch_fields(pkginfo, deptype): depname, depcond = parse_dep(dep_info['value']) deparch = dep_info['arch'] - cur.execute("INSERT INTO PackageDepends (PackageID, " + - "DepTypeID, DepName, DepCondition, DepArch) " + - "VALUES (%s, %s, %s, %s, %s)", - [pkgid, deptypeid, depname, depcond, deparch]) + conn.execute("INSERT INTO PackageDepends (PackageID, " + + "DepTypeID, DepName, DepCondition, DepArch) " + + "VALUES (?, ?, ?, ?, ?)", + [pkgid, deptypeid, depname, depcond, deparch]) # Add package relations (conflicts, provides, replaces). for reltype in ('conflicts', 'provides', 'replaces'): - cur.execute("SELECT ID FROM RelationTypes WHERE Name = %s", - [reltype]) + cur = conn.execute("SELECT ID FROM RelationTypes WHERE Name = ?", + [reltype]) reltypeid = cur.fetchone()[0] for rel_info in extract_arch_fields(pkginfo, reltype): relname, relcond = parse_dep(rel_info['value']) relarch = rel_info['arch'] - cur.execute("INSERT INTO PackageRelations (PackageID, " + - "RelTypeID, RelName, RelCondition, RelArch) " + - "VALUES (%s, %s, %s, %s, %s)", - [pkgid, reltypeid, relname, relcond, relarch]) + conn.execute("INSERT INTO PackageRelations (PackageID, " + + "RelTypeID, RelName, RelCondition, RelArch) " + + "VALUES (?, ?, ?, ?, ?)", + [pkgid, reltypeid, relname, relcond, relarch]) # Add package licenses. 
if 'license' in pkginfo: for license in pkginfo['license']: - cur.execute("SELECT ID FROM Licenses WHERE Name = %s", - [license]) + cur = conn.execute("SELECT ID FROM Licenses WHERE Name = ?", + [license]) if cur.rowcount == 1: licenseid = cur.fetchone()[0] else: - cur.execute("INSERT INTO Licenses (Name) VALUES (%s)", - [license]) - db.commit() + cur = conn.execute("INSERT INTO Licenses (Name) " + + "VALUES (?)", [license]) + conn.commit() licenseid = cur.lastrowid - cur.execute("INSERT INTO PackageLicenses (PackageID, " + - "LicenseID) VALUES (%s, %s)", - [pkgid, licenseid]) + conn.execute("INSERT INTO PackageLicenses (PackageID, " + + "LicenseID) VALUES (?, ?)", + [pkgid, licenseid]) # Add package groups. if 'groups' in pkginfo: for group in pkginfo['groups']: - cur.execute("SELECT ID FROM Groups WHERE Name = %s", - [group]) + cur = conn.execute("SELECT ID FROM Groups WHERE Name = ?", + [group]) if cur.rowcount == 1: groupid = cur.fetchone()[0] else: - cur.execute("INSERT INTO Groups (Name) VALUES (%s)", - [group]) - db.commit() + cur = conn.execute("INSERT INTO Groups (Name) VALUES (?)", + [group]) + conn.commit() groupid = cur.lastrowid - cur.execute("INSERT INTO PackageGroups (PackageID, " - "GroupID) VALUES (%s, %s)", [pkgid, groupid]) + conn.execute("INSERT INTO PackageGroups (PackageID, " + "GroupID) VALUES (?, ?)", [pkgid, groupid]) # Add user to notification list on adoption. if was_orphan: - cur.execute("SELECT COUNT(*) FROM PackageNotifications WHERE " + - "PackageBaseID = %s AND UserID = %s", - [pkgbase_id, user_id]) + cur = conn.execute("SELECT COUNT(*) FROM PackageNotifications WHERE " + + "PackageBaseID = ? AND UserID = ?", + [pkgbase_id, user_id]) if cur.fetchone()[0] == 0: - cur.execute("INSERT INTO PackageNotifications " + - "(PackageBaseID, UserID) VALUES (%s, %s)", - [pkgbase_id, user_id]) + conn.execute("INSERT INTO PackageNotifications " + + "(PackageBaseID, UserID) VALUES (?, ?)", + [pkgbase_id, user_id]) - db.commit() + conn.commit() -def update_notify(db, cur, user, pkgbase_id): +def update_notify(conn, user, pkgbase_id): # Obtain the user ID of the new maintainer. - cur.execute("SELECT ID FROM Users WHERE Username = %s", [user]) + cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user]) user_id = int(cur.fetchone()[0]) # Execute the notification script. @@ -233,10 +228,7 @@ else: if refname != "refs/heads/master": die("pushing to a branch other than master is restricted") -db = mysql.connector.connect(host=aur_db_host, user=aur_db_user, - passwd=aur_db_pass, db=aur_db_name, - unix_socket=aur_db_socket, buffered=True) -cur = db.cursor() +conn = db.Connection() # Detect and deny non-fast-forwards. if sha1_old != "0000000000000000000000000000000000000000" and not privileged: @@ -339,13 +331,13 @@ if metadata_pkgbase != pkgbase: # Ensure that packages are neither blacklisted nor overwritten. 
pkgbase = metadata['pkgbase'] -cur.execute("SELECT ID FROM PackageBases WHERE Name = %s", [pkgbase]) +cur = conn.execute("SELECT ID FROM PackageBases WHERE Name = ?", [pkgbase]) pkgbase_id = cur.fetchone()[0] if cur.rowcount == 1 else 0 -cur.execute("SELECT Name FROM PackageBlacklist") +cur = conn.execute("SELECT Name FROM PackageBlacklist") blacklist = [row[0] for row in cur.fetchall()] -cur.execute("SELECT Name, Repo FROM OfficialProviders") +cur = conn.execute("SELECT Name, Repo FROM OfficialProviders") providers = dict(cur.fetchall()) for pkgname in srcinfo.utils.get_package_names(metadata): @@ -358,13 +350,13 @@ for pkgname in srcinfo.utils.get_package_names(metadata): repo = providers[pkgname] warn_or_die('package already provided by [{:s}]: {:s}'.format(repo, pkgname)) - cur.execute("SELECT COUNT(*) FROM Packages WHERE Name = %s AND " + - "PackageBaseID <> %s", [pkgname, pkgbase_id]) + cur = conn.execute("SELECT COUNT(*) FROM Packages WHERE Name = ? AND " + + "PackageBaseID <> ?", [pkgname, pkgbase_id]) if cur.fetchone()[0] > 0: die('cannot overwrite package: {:s}'.format(pkgname)) # Store package base details in the database. -save_metadata(metadata, db, cur, user) +save_metadata(metadata, conn, user) # Create (or update) a branch with the name of the package base for better # accessibility. @@ -377,7 +369,7 @@ repo.create_reference('refs/heads/' + pkgbase, sha1_new, True) repo.create_reference('refs/namespaces/' + pkgbase + '/HEAD', sha1_new, True) # Send package update notifications. -update_notify(db, cur, user, pkgbase_id) +update_notify(conn, user, pkgbase_id) # Close the database. -db.close() +conn.close() -- 2.9.2
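For reference, the new layer can be exercised interactively as follows; this
is only a usage sketch (the query and user name are examples, not part of
the patch):

    import db

    conn = db.Connection()
    # qmark placeholders are translated for the MySQL driver internally.
    cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", ["user"])
    row = cur.fetchone()
    print(row[0] if row else "no such user")
    conn.close()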
Add a new module that automatically locates the configuration file and provides methods to obtain the values of configuration options. Use the new module instead of ConfigParser everywhere. Signed-off-by: Lukas Fleischer <lfleischer@archlinux.org> --- git-interface/config.py | 27 +++++++++++++++++++++++++++ git-interface/db.py | 7 ++----- git-interface/git-auth.py | 6 +----- git-interface/git-serve.py | 5 +---- git-interface/git-update.py | 5 +---- 5 files changed, 32 insertions(+), 18 deletions(-) create mode 100644 git-interface/config.py diff --git a/git-interface/config.py b/git-interface/config.py new file mode 100644 index 0000000..cd6495b --- /dev/null +++ b/git-interface/config.py @@ -0,0 +1,27 @@ +import configparser +import os + +_parser = None + + +def _get_parser(): + global _parser + + if not _parser: + _parser = configparser.RawConfigParser() + path = os.path.dirname(os.path.realpath(__file__)) + "/../conf/config" + _parser.read(path) + + return _parser + + +def get(section, option): + return _get_parser().get(section, option) + + +def getboolean(section, option): + return _get_parser().getboolean(section, option) + + +def getint(section, option): + return _get_parser().getint(section, option) diff --git a/git-interface/db.py b/git-interface/db.py index d3e1e69..c4c7d31 100644 --- a/git-interface/db.py +++ b/git-interface/db.py @@ -1,15 +1,12 @@ -import configparser import mysql.connector -import os + +import config class Connection: _conn = None def __init__(self): - config = configparser.RawConfigParser() - config.read(os.path.dirname(os.path.realpath(__file__)) + "/../conf/config") - aur_db_host = config.get('database', 'host') aur_db_name = config.get('database', 'name') aur_db_user = config.get('database', 'user') diff --git a/git-interface/git-auth.py b/git-interface/git-auth.py index 7cd033c..ebdc75c 100755 --- a/git-interface/git-auth.py +++ b/git-interface/git-auth.py @@ -1,11 +1,10 @@ #!/usr/bin/python3 -import configparser import shlex -import os import re import sys +import config import db @@ -24,9 +23,6 @@ def format_command(env_vars, command, ssh_opts, ssh_key): return msg -config = configparser.RawConfigParser() -config.read(os.path.dirname(os.path.realpath(__file__)) + "/../conf/config") - valid_keytypes = config.get('auth', 'valid-keytypes').split() username_regex = config.get('auth', 'username-regex') git_serve_cmd = config.get('auth', 'git-serve-cmd') diff --git a/git-interface/git-serve.py b/git-interface/git-serve.py index ab612f0..6377ffc 100755 --- a/git-interface/git-serve.py +++ b/git-interface/git-serve.py @@ -1,16 +1,13 @@ #!/usr/bin/python3 -import configparser import os import re import shlex import sys +import config import db -config = configparser.RawConfigParser() -config.read(os.path.dirname(os.path.realpath(__file__)) + "/../conf/config") - repo_path = config.get('serve', 'repo-path') repo_regex = config.get('serve', 'repo-regex') git_shell_cmd = config.get('serve', 'git-shell-cmd') diff --git a/git-interface/git-update.py b/git-interface/git-update.py index b7199e6..9a127a9 100755 --- a/git-interface/git-update.py +++ b/git-interface/git-update.py @@ -1,6 +1,5 @@ #!/usr/bin/python3 -import configparser import os import pygit2 import re @@ -10,11 +9,9 @@ import sys import srcinfo.parse import srcinfo.utils +import config import db -config = configparser.RawConfigParser() -config.read(os.path.dirname(os.path.realpath(__file__)) + "/../conf/config") - notify_cmd = config.get('notifications', 'notify-cmd') repo_path = config.get('serve', 
'repo-path') -- 2.9.2
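A short usage sketch of the new config module (the option names are taken
from conf/config.proto; the snippet itself is illustrative and not part of
the patch):

    import config

    # Values are read lazily from conf/config the first time get() is called.
    repo_path = config.get('serve', 'repo-path')
    max_blob_size = config.getint('update', 'max-blob-size')
    print(repo_path, max_blob_size)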
Introduce a new environment variable that can be used to specify the path
to an aurweb configuration file. If the environment variable is unset, the
default search path is used.

Signed-off-by: Lukas Fleischer <lfleischer@archlinux.org>
---
 git-interface/config.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/git-interface/config.py b/git-interface/config.py
index cd6495b..aac188b 100644
--- a/git-interface/config.py
+++ b/git-interface/config.py
@@ -9,7 +9,11 @@ def _get_parser():
 
     if not _parser:
         _parser = configparser.RawConfigParser()
-        path = os.path.dirname(os.path.realpath(__file__)) + "/../conf/config"
+        if 'AUR_CONFIG' in os.environ:
+            path = os.environ.get('AUR_CONFIG')
+        else:
+            relpath = "/../conf/config"
+            path = os.path.dirname(os.path.realpath(__file__)) + relpath
         _parser.read(path)
 
     return _parser
-- 
2.9.2
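A sketch of how the override is picked up (the path below is an arbitrary
example and not part of the patch):

    import os

    # Must be set before the first config.get() call, since the parser
    # is created lazily and caches the file it has read.
    os.environ['AUR_CONFIG'] = '/tmp/aurweb-test/config'

    import config
    print(config.get('database', 'backend'))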
Avoid using Cursor.rowcount to obtain the number of rows returned by a SELECT statement as this is not guaranteed to be supported by every database engine. Signed-off-by: Lukas Fleischer <lfleischer@archlinux.org> --- git-interface/git-auth.py | 5 +++-- git-interface/git-update.py | 13 ++++++++----- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/git-interface/git-auth.py b/git-interface/git-auth.py index ebdc75c..45fd577 100755 --- a/git-interface/git-auth.py +++ b/git-interface/git-auth.py @@ -40,10 +40,11 @@ cur = conn.execute("SELECT Users.Username, Users.AccountTypeID FROM Users " + "WHERE SSHPubKeys.PubKey = ? AND Users.Suspended = 0", (keytype + " " + keytext,)) -if cur.rowcount != 1: +row = cur.fetchone() +if not row or cur.fetchone(): exit(1) -user, account_type = cur.fetchone() +user, account_type = row if not re.match(username_regex, user): exit(1) diff --git a/git-interface/git-update.py b/git-interface/git-update.py index 9a127a9..d6c9f10 100755 --- a/git-interface/git-update.py +++ b/git-interface/git-update.py @@ -140,8 +140,9 @@ def save_metadata(metadata, conn, user): for license in pkginfo['license']: cur = conn.execute("SELECT ID FROM Licenses WHERE Name = ?", [license]) - if cur.rowcount == 1: - licenseid = cur.fetchone()[0] + row = cur.fetchone() + if row: + licenseid = row[0] else: cur = conn.execute("INSERT INTO Licenses (Name) " + "VALUES (?)", [license]) @@ -156,8 +157,9 @@ def save_metadata(metadata, conn, user): for group in pkginfo['groups']: cur = conn.execute("SELECT ID FROM Groups WHERE Name = ?", [group]) - if cur.rowcount == 1: - groupid = cur.fetchone()[0] + row = cur.fetchone() + if row: + groupid = row[0] else: cur = conn.execute("INSERT INTO Groups (Name) VALUES (?)", [group]) @@ -329,7 +331,8 @@ if metadata_pkgbase != pkgbase: # Ensure that packages are neither blacklisted nor overwritten. pkgbase = metadata['pkgbase'] cur = conn.execute("SELECT ID FROM PackageBases WHERE Name = ?", [pkgbase]) -pkgbase_id = cur.fetchone()[0] if cur.rowcount == 1 else 0 +row = cur.fetchone() +pkgbase_id = row[0] if row else 0 cur = conn.execute("SELECT Name FROM PackageBlacklist") blacklist = [row[0] for row in cur.fetchall()] -- 2.9.2
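A quick way to see the portability problem (not part of the patch; uses an
in-memory SQLite database as an example): with Python's sqlite3 module,
rowcount stays at -1 after a SELECT, so the scripts now fetch rows and test
for their presence instead.

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE Users (ID INTEGER PRIMARY KEY, Username TEXT)")
    conn.execute("INSERT INTO Users (Username) VALUES ('user')")

    cur = conn.execute("SELECT ID FROM Users WHERE Username = 'user'")
    print(cur.rowcount)  # -1 for SELECT statements under sqlite3
    row = cur.fetchone()
    # Exactly one matching row: first fetch succeeds, second returns None.
    print(row is not None and cur.fetchone() is None)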
Avoid using UNIX_TIMESTAMP which is not part of the SQL standard. Retrieve the current UNIX time in Python and substitute it into the SQL queries instead. Signed-off-by: Lukas Fleischer <lfleischer@archlinux.org> --- git-interface/git-serve.py | 5 +++-- git-interface/git-update.py | 6 ++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/git-interface/git-serve.py b/git-interface/git-serve.py index 6377ffc..d3a32c3 100755 --- a/git-interface/git-serve.py +++ b/git-interface/git-serve.py @@ -4,6 +4,7 @@ import os import re import shlex import sys +import time import config import db @@ -58,10 +59,10 @@ def create_pkgbase(pkgbase, user): if userid == 0: die('{:s}: unknown user: {:s}'.format(action, user)) + now = int(time.time()) cur = conn.execute("INSERT INTO PackageBases (Name, SubmittedTS, " + "ModifiedTS, SubmitterUID, MaintainerUID) VALUES " + - "(?, UNIX_TIMESTAMP(), UNIX_TIMESTAMP(), ?, ?)", - [pkgbase, userid, userid]) + "(?, ?, ?, ?, ?)", [pkgbase, now, now, userid, userid]) pkgbase_id = cur.lastrowid cur = conn.execute("INSERT INTO PackageNotifications " + diff --git a/git-interface/git-update.py b/git-interface/git-update.py index d6c9f10..2820720 100755 --- a/git-interface/git-update.py +++ b/git-interface/git-update.py @@ -5,6 +5,7 @@ import pygit2 import re import subprocess import sys +import time import srcinfo.parse import srcinfo.utils @@ -70,9 +71,10 @@ def save_metadata(metadata, conn, user): user_id = int(cur.fetchone()[0]) # Update package base details and delete current packages. - conn.execute("UPDATE PackageBases SET ModifiedTS = UNIX_TIMESTAMP(), " + + now = int(time.time()) + conn.execute("UPDATE PackageBases SET ModifiedTS = ?, " + "PackagerUID = ?, OutOfDateTS = NULL WHERE ID = ?", - [user_id, pkgbase_id]) + [now, user_id, pkgbase_id]) conn.execute("UPDATE PackageBases SET MaintainerUID = ? " + "WHERE ID = ? AND MaintainerUID IS NULL", [user_id, pkgbase_id]) -- 2.9.2
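The replacement pattern, shown as a self-contained sketch against an
in-memory SQLite database (table and column names mirror the schema; the
snippet is illustrative only):

    import sqlite3
    import time

    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE PackageBases (ID INTEGER PRIMARY KEY, "
                 "ModifiedTS BIGINT)")
    conn.execute("INSERT INTO PackageBases (ID, ModifiedTS) VALUES (1, 0)")

    # Compute the UNIX timestamp in Python and bind it as an ordinary
    # parameter instead of calling the MySQL-only UNIX_TIMESTAMP().
    now = int(time.time())
    conn.execute("UPDATE PackageBases SET ModifiedTS = ? WHERE ID = ?", [now, 1])
    print(conn.execute("SELECT ModifiedTS FROM PackageBases "
                       "WHERE ID = 1").fetchone()[0])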
In addition to MySQL, add support for SQLite to the database abstraction layer. Also, add a new configuration option to select the DBMS. Signed-off-by: Lukas Fleischer <lfleischer@archlinux.org> --- conf/config.proto | 2 +- git-interface/db.py | 42 +++++++++++++++++++++++++++++------------- 2 files changed, 30 insertions(+), 14 deletions(-) diff --git a/conf/config.proto b/conf/config.proto index 543c3ca..c56141c 100644 --- a/conf/config.proto +++ b/conf/config.proto @@ -1,5 +1,5 @@ [database] -dsn_prefix = mysql +backend = mysql host = localhost socket = /var/run/mysqld/mysqld.sock name = AUR diff --git a/git-interface/db.py b/git-interface/db.py index c4c7d31..060689b 100644 --- a/git-interface/db.py +++ b/git-interface/db.py @@ -1,27 +1,43 @@ import mysql.connector +import sqlite3 import config class Connection: _conn = None + _paramstyle = None def __init__(self): - aur_db_host = config.get('database', 'host') - aur_db_name = config.get('database', 'name') - aur_db_user = config.get('database', 'user') - aur_db_pass = config.get('database', 'password') - aur_db_socket = config.get('database', 'socket') - - self._conn = mysql.connector.connect(host=aur_db_host, - user=aur_db_user, - passwd=aur_db_pass, - db=aur_db_name, - unix_socket=aur_db_socket, - buffered=True) + aur_db_backend = config.get('database', 'backend') + + if aur_db_backend == 'mysql': + aur_db_host = config.get('database', 'host') + aur_db_name = config.get('database', 'name') + aur_db_user = config.get('database', 'user') + aur_db_pass = config.get('database', 'password') + aur_db_socket = config.get('database', 'socket') + self._conn = mysql.connector.connect(host=aur_db_host, + user=aur_db_user, + passwd=aur_db_pass, + db=aur_db_name, + unix_socket=aur_db_socket, + buffered=True) + self._paramstyle = mysql.connector.paramstyle + elif aur_db_backend == 'sqlite': + aur_db_name = config.get('database', 'name') + self._conn = sqlite3.connect(aur_db_name) + self._paramstyle = sqlite3.paramstyle + else: + raise ValueError('unsupported database backend') def execute(self, query, params=()): - query = query.replace('%', '%%').replace('?', '%s') + if self._paramstyle == 'format': + query = query.replace('%', '%%').replace('?', '%s') + elif self._paramstyle == 'qmark': + pass + else: + raise ValueError('unsupported paramstyle') cur = self._conn.cursor() cur.execute(query, params) -- 2.9.2
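The placeholder handling in Connection.execute() can be illustrated in
isolation (a standalone sketch, not the code from db.py itself):

    # Queries are written with qmark ("?") placeholders throughout the
    # scripts; they are rewritten to the "format" style only when the
    # driver in use requires it, and passed through unchanged for sqlite3.
    def translate(query, paramstyle):
        if paramstyle == 'format':
            return query.replace('%', '%%').replace('?', '%s')
        elif paramstyle == 'qmark':
            return query
        raise ValueError('unsupported paramstyle')

    print(translate("SELECT ID FROM Users WHERE Username = ?", 'format'))
    # SELECT ID FROM Users WHERE Username = %s
    print(translate("SELECT ID FROM Users WHERE Username = ?", 'qmark'))
    # SELECT ID FROM Users WHERE Username = ?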
* Remove test accounts. * Create indices using CREATE INDEX. * Always use INTEGER UNSIGNED for IDs. * Always use BIGINT UNSIGNED for timestamps. Signed-off-by: Lukas Fleischer <lfleischer@archlinux.org> --- schema/aur-schema.sql | 67 +++++++++++++++++++++++---------------------------- 1 file changed, 30 insertions(+), 37 deletions(-) diff --git a/schema/aur-schema.sql b/schema/aur-schema.sql index 9991129..030370b 100644 --- a/schema/aur-schema.sql +++ b/schema/aur-schema.sql @@ -45,16 +45,9 @@ CREATE TABLE Users ( PRIMARY KEY (ID), UNIQUE (Username), UNIQUE (Email), - INDEX (AccountTypeID), FOREIGN KEY (AccountTypeID) REFERENCES AccountTypes(ID) ON DELETE NO ACTION ) ENGINE = InnoDB; --- A default developer account for testing purposes -INSERT INTO Users (ID, AccountTypeID, Username, Email, Passwd) VALUES ( - 1, 3, 'dev', 'dev@localhost', MD5('dev')); -INSERT INTO Users (ID, AccountTypeID, Username, Email, Passwd) VALUES ( - 2, 2, 'tu', 'tu@localhost', MD5('tu')); -INSERT INTO Users (ID, AccountTypeID, Username, Email, Passwd) VALUES ( - 3, 1, 'user', 'user@localhost', MD5('user')); +CREATE INDEX UsersAccountTypeID ON Users (AccountTypeID); -- SSH public keys used for the aurweb SSH/Git interface. @@ -96,16 +89,16 @@ CREATE TABLE PackageBases ( PackagerUID INTEGER UNSIGNED NULL DEFAULT NULL, -- Last packager PRIMARY KEY (ID), UNIQUE (Name), - INDEX (NumVotes), - INDEX (SubmitterUID), - INDEX (MaintainerUID), - INDEX (PackagerUID), FOREIGN KEY (FlaggerUID) REFERENCES Users(ID) ON DELETE SET NULL, -- deleting a user will cause packages to be orphaned, not deleted FOREIGN KEY (SubmitterUID) REFERENCES Users(ID) ON DELETE SET NULL, FOREIGN KEY (MaintainerUID) REFERENCES Users(ID) ON DELETE SET NULL, FOREIGN KEY (PackagerUID) REFERENCES Users(ID) ON DELETE SET NULL ) ENGINE = InnoDB; +CREATE INDEX BasesNumVotes ON PackageBases (NumVotes); +CREATE INDEX BasesSubmitterUID ON PackageBases (SubmitterUID); +CREATE INDEX BasesMaintainerUID ON PackageBases (MaintainerUID); +CREATE INDEX BasesPackagerUID ON PackageBases (PackagerUID); -- Keywords of package bases @@ -196,11 +189,11 @@ CREATE TABLE PackageDepends ( DepName VARCHAR(255) NOT NULL, DepCondition VARCHAR(255), DepArch VARCHAR(255) NULL DEFAULT NULL, - INDEX (PackageID), - INDEX (DepName), FOREIGN KEY (PackageID) REFERENCES Packages(ID) ON DELETE CASCADE, FOREIGN KEY (DepTypeID) REFERENCES DependencyTypes(ID) ON DELETE NO ACTION ) ENGINE = InnoDB; +CREATE INDEX DependsPackageID ON PackageDepends (PackageID); +CREATE INDEX DependsDepName ON PackageDepends (DepName); -- Define the package relation types @@ -223,11 +216,11 @@ CREATE TABLE PackageRelations ( RelName VARCHAR(255) NOT NULL, RelCondition VARCHAR(255), RelArch VARCHAR(255) NULL DEFAULT NULL, - INDEX (PackageID), - INDEX (RelName), FOREIGN KEY (PackageID) REFERENCES Packages(ID) ON DELETE CASCADE, FOREIGN KEY (RelTypeID) REFERENCES RelationTypes(ID) ON DELETE NO ACTION ) ENGINE = InnoDB; +CREATE INDEX RelationsPackageID ON PackageRelations (PackageID); +CREATE INDEX RelationsRelName ON PackageRelations (RelName); -- Track which sources a package has @@ -236,9 +229,9 @@ CREATE TABLE PackageSources ( PackageID INTEGER UNSIGNED NOT NULL, Source VARCHAR(255) NOT NULL DEFAULT "/dev/null", SourceArch VARCHAR(255) NULL DEFAULT NULL, - INDEX (PackageID), FOREIGN KEY (PackageID) REFERENCES Packages(ID) ON DELETE CASCADE ) ENGINE = InnoDB; +CREATE INDEX SourcesPackageID ON PackageSources (PackageID); -- Track votes for packages @@ -247,12 +240,12 @@ CREATE TABLE PackageVotes ( UsersID INTEGER 
UNSIGNED NOT NULL, PackageBaseID INTEGER UNSIGNED NOT NULL, VoteTS BIGINT UNSIGNED NULL DEFAULT NULL, - INDEX (UsersID), - INDEX (PackageBaseID), FOREIGN KEY (UsersID) REFERENCES Users(ID) ON DELETE CASCADE, FOREIGN KEY (PackageBaseID) REFERENCES PackageBases(ID) ON DELETE CASCADE ) ENGINE = InnoDB; CREATE UNIQUE INDEX VoteUsersIDPackageID ON PackageVotes (UsersID, PackageBaseID); +CREATE INDEX VotesUsersID ON PackageVotes (UsersID); +CREATE INDEX VotesPackageBaseID ON PackageVotes (PackageBaseID); -- Record comments for packages -- @@ -268,13 +261,13 @@ CREATE TABLE PackageComments ( DelUsersID INTEGER UNSIGNED NULL DEFAULT NULL, PinnedTS BIGINT UNSIGNED NOT NULL DEFAULT 0, PRIMARY KEY (ID), - INDEX (UsersID), - INDEX (PackageBaseID), FOREIGN KEY (UsersID) REFERENCES Users(ID) ON DELETE SET NULL, FOREIGN KEY (EditedUsersID) REFERENCES Users(ID) ON DELETE SET NULL, FOREIGN KEY (DelUsersID) REFERENCES Users(ID) ON DELETE CASCADE, FOREIGN KEY (PackageBaseID) REFERENCES PackageBases(ID) ON DELETE CASCADE ) ENGINE = InnoDB; +CREATE INDEX CommentsUsersID ON PackageComments (UsersID); +CREATE INDEX CommentsPackageBaseID ON PackageComments (PackageBaseID); -- Package base co-maintainers -- @@ -282,11 +275,11 @@ CREATE TABLE PackageComaintainers ( UsersID INTEGER UNSIGNED NOT NULL, PackageBaseID INTEGER UNSIGNED NOT NULL, Priority INTEGER UNSIGNED NOT NULL, - INDEX (UsersID), - INDEX (PackageBaseID), FOREIGN KEY (UsersID) REFERENCES Users(ID) ON DELETE CASCADE, FOREIGN KEY (PackageBaseID) REFERENCES PackageBases(ID) ON DELETE CASCADE ) ENGINE = InnoDB; +CREATE INDEX ComaintainersUsersID ON PackageComaintainers (UsersID); +CREATE INDEX ComaintainersPackageBaseID ON PackageComaintainers (PackageBaseID); -- Package base notifications -- @@ -343,27 +336,27 @@ CREATE TABLE PackageRequests ( RequestTS BIGINT UNSIGNED NOT NULL DEFAULT 0, Status TINYINT UNSIGNED NOT NULL DEFAULT 0, PRIMARY KEY (ID), - INDEX (UsersID), - INDEX (PackageBaseID), FOREIGN KEY (ReqTypeID) REFERENCES RequestTypes(ID) ON DELETE NO ACTION, FOREIGN KEY (UsersID) REFERENCES Users(ID) ON DELETE SET NULL, FOREIGN KEY (PackageBaseID) REFERENCES PackageBases(ID) ON DELETE SET NULL ) ENGINE = InnoDB; +CREATE INDEX RequestsUsersID ON PackageRequests (UsersID); +CREATE INDEX RequestsPackageBaseID ON PackageRequests (PackageBaseID); -- Vote information -- CREATE TABLE IF NOT EXISTS TU_VoteInfo ( - ID int(10) unsigned NOT NULL auto_increment, - Agenda text NOT NULL, + ID INTEGER UNSIGNED NOT NULL AUTO_INCREMENT, + Agenda TEXT NOT NULL, User VARCHAR(32) NOT NULL, - Submitted bigint(20) unsigned NOT NULL, - End bigint(20) unsigned NOT NULL, - Quorum decimal(2, 2) unsigned NOT NULL, - SubmitterID int(10) unsigned NOT NULL, - Yes tinyint(3) unsigned NOT NULL default '0', - No tinyint(3) unsigned NOT NULL default '0', - Abstain tinyint(3) unsigned NOT NULL default '0', - ActiveTUs tinyint(3) unsigned NOT NULL default '0', + Submitted BIGINT UNSIGNED NOT NULL, + End BIGINT UNSIGNED NOT NULL, + Quorum DECIMAL(2, 2) UNSIGNED NOT NULL, + SubmitterID INTEGER UNSIGNED NOT NULL, + Yes TINYINT(3) UNSIGNED NOT NULL DEFAULT '0', + No TINYINT(3) UNSIGNED NOT NULL DEFAULT '0', + Abstain TINYINT(3) UNSIGNED NOT NULL DEFAULT '0', + ActiveTUs TINYINT(3) UNSIGNED NOT NULL DEFAULT '0', PRIMARY KEY (ID), FOREIGN KEY (SubmitterID) REFERENCES Users(ID) ON DELETE CASCADE ) ENGINE = InnoDB; @@ -371,8 +364,8 @@ CREATE TABLE IF NOT EXISTS TU_VoteInfo ( -- Individual vote records -- CREATE TABLE IF NOT EXISTS TU_Votes ( - VoteID int(10) unsigned NOT NULL, - UserID 
int(10) unsigned NOT NULL, + VoteID INTEGER UNSIGNED NOT NULL, + UserID INTEGER UNSIGNED NOT NULL, FOREIGN KEY (VoteID) REFERENCES TU_VoteInfo(ID) ON DELETE CASCADE, FOREIGN KEY (UserID) REFERENCES Users(ID) ON DELETE CASCADE ) ENGINE = InnoDB; -- 2.9.2
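Moving the inline INDEX clauses out of the CREATE TABLE statements also
keeps the schema closer to what SQLite accepts, since SQLite does not
support MySQL's inline "INDEX (...)" syntax. A quick check against an
in-memory database (not part of the patch; table and index names follow
the schema above):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE PackageVotes ("
                 "UsersID INTEGER NOT NULL, "
                 "PackageBaseID INTEGER NOT NULL)")
    conn.execute("CREATE INDEX VotesUsersID ON PackageVotes (UsersID)")
    conn.execute("CREATE INDEX VotesPackageBaseID ON PackageVotes (PackageBaseID)")
    # Lists the two indices created above.
    print([row[1] for row in conn.execute("PRAGMA index_list('PackageVotes')")])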
Add basic tests for the Git interface. The test suite is based on sharness. Signed-off-by: Lukas Fleischer <lfleischer@archlinux.org> --- git-interface/test/Makefile | 11 + git-interface/test/setup.sh | 121 ++++++ git-interface/test/sharness.sh | 851 +++++++++++++++++++++++++++++++++++++ git-interface/test/t0001-auth.sh | 19 + git-interface/test/t0002-serve.sh | 26 ++ git-interface/test/t0003-update.sh | 20 + 6 files changed, 1048 insertions(+) create mode 100644 git-interface/test/Makefile create mode 100644 git-interface/test/setup.sh create mode 100644 git-interface/test/sharness.sh create mode 100755 git-interface/test/t0001-auth.sh create mode 100755 git-interface/test/t0002-serve.sh create mode 100755 git-interface/test/t0003-update.sh diff --git a/git-interface/test/Makefile b/git-interface/test/Makefile new file mode 100644 index 0000000..d6f0f74 --- /dev/null +++ b/git-interface/test/Makefile @@ -0,0 +1,11 @@ +T = $(sort $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)) + +check: $(T) + +clean: + $(RM) -r test-results/ + +$(T): + @echo "*** $@ ***"; $(SHELL) $@ + +.PHONY: check clean $(T) diff --git a/git-interface/test/setup.sh b/git-interface/test/setup.sh new file mode 100644 index 0000000..eecb4d5 --- /dev/null +++ b/git-interface/test/setup.sh @@ -0,0 +1,121 @@ +TEST_DIRECTORY="$(pwd)" + +. ./sharness.sh + +# Configure paths to the Git interface scripts. +GIT_AUTH="$TEST_DIRECTORY/../git-auth.py" +GIT_SERVE="$TEST_DIRECTORY/../git-serve.py" +GIT_UPDATE="$TEST_DIRECTORY/../git-update.py" + +# Create the configuration file and a dummy notification script. +cat >config <<-EOF +[database] +backend = sqlite +name = aur.db + +[options] +enable-maintenance = 0 +maintenance-exceptions = 127.0.0.1 + +[notifications] +notify-cmd = ./notify.sh + +[auth] +valid-keytypes = ssh-rsa ssh-dss ecdsa-sha2-nistp256 ecdsa-sha2-nistp384 ecdsa-sha2-nistp521 ssh-ed25519 +username-regex = [a-zA-Z0-9]+[.\-_]?[a-zA-Z0-9]+$ +git-serve-cmd = /srv/http/aurweb/git-interface/git-serve.py +ssh-options = restrict + +[serve] +repo-path = ./aur.git/ +repo-regex = [a-z0-9][a-z0-9.+_-]*$ +git-shell-cmd = /usr/bin/git-shell +git-update-cmd = /srv/http/aurweb/git-interface/git-update.py +ssh-cmdline = ssh aur@aur.archlinux.org + +[update] +max-blob-size = 256000 +EOF + +cat >notify.sh <<-EOF +#!/bin/sh +EOF +chmod +x notify.sh + +AUR_CONFIG=config +export AUR_CONFIG + +# Create SSH public keys which will be used by the test users later. +AUTH_KEYTYPE_USER=ssh-rsa +AUTH_KEYTEXT_USER=AAAAB3NzaC1yc2EAAAADAQABAAABAQCeUafDK4jqUiRHNQfwHcYjBKLZ4Rc1sNUofHApBP6j91nIvDHZe2VUqeBmFUhBz7kXK4VbXD9nlHMun2HeshL8hXnMzymZ8Wk7+IKefj61pajJkIdttw9Tnayfg7uhg5RbFy9zpEjmGjnIVjSzOXKCwppNT+CNujpKM5FD8gso/Z+l3fD+IwrPwS1SzF1Z99nqI9n2FM/JWZqluvTqnW9WdAvBDfutXxp0R5ZiLI5TAKL2Ssp5rpL70pkLXhv+9sK545zKKlXUFmw6Pi2iVBdqdRsk9ocl49dLiNIh8CYDCO3CRQn+8EnpBhTor2TKQxGJI3mzoBwWJJxoKhD/XlYJ +AUTH_FINGERPRINT_USER=SHA256:F/OFtYAy0JCytAGUi4RUZnOsThhQtFMK7fH1YvFBCpo + +AUTH_KEYTYPE_TU=ssh-rsa +AUTH_KEYTEXT_TU=AAAAB3NzaC1yc2EAAAADAQABAAABAQC4Q2Beg6jf2r1LZ4vwT5y10dK8+/c5RaNyTwv77wF2OSLXh32xW0ovhE2lW2gqoakdGsxgM2fTtqMTl29WOsAxlGF7x9XbWhFXFUT88Daq1fAeuihkiRjfBbInSW/WcrFZ+biLBch67addtfkkd4PmAafDeeCtszAXqza+ltBG1oxAGiTXgI3LOhA1/GtLLxsi5sPUO3ZlhvwDn4Sy0aXYx8l9hop/PU4Cjn82hyRa9r+SRxQ3KtjKxcVMnZ8IyXOrBwXTukgSBR/6nSdEmO0JPkYUFuNwh3UGFKuNkrPguL5T+4YDym6czYmZJzQ7NNl2pLKYmYgBwBe5rORlWfN5 +AUTH_FINGERPRINT_TU=SHA256:xQGC6j/U1Q3NDXLl04pm+Shr1mjYUXbGMUzlm9vby4k + +# Initialize the test database. 
+rm -f aur.db +sed \ + -e '/^DROP DATABASE /d' \ + -e '/^CREATE DATABASE /d' \ + -e '/^USE /d' \ + -e 's/ ENGINE = InnoDB//' \ + -e 's/ [A-Z]* UNSIGNED NOT NULL AUTO_INCREMENT/ INTEGER NOT NULL/' \ + -e 's/([0-9, ]*) UNSIGNED / UNSIGNED /' \ + "$TEST_DIRECTORY/../../schema/aur-schema.sql" | sqlite3 aur.db + +echo "INSERT INTO Users (ID, UserName, Passwd, Email, AccountTypeID) VALUES (1, 'user', '!', 'user@localhost', 1);" | sqlite3 aur.db +echo "INSERT INTO Users (ID, UserName, Passwd, Email, AccountTypeID) VALUES (2, 'tu', '!', 'tu@localhost', 2);" | sqlite3 aur.db + +echo "INSERT INTO SSHPubKeys (UserID, Fingerprint, PubKey) VALUES (1, '$AUTH_FINGERPRINT_USER', '$AUTH_KEYTYPE_USER $AUTH_KEYTEXT_USER');" | sqlite3 aur.db +echo "INSERT INTO SSHPubKeys (UserID, Fingerprint, PubKey) VALUES (2, '$AUTH_FINGERPRINT_TU', '$AUTH_KEYTYPE_TU $AUTH_KEYTEXT_TU');" | sqlite3 aur.db + +# Initialize a Git repository to store test packages in. +( + GIT_AUTHOR_EMAIL=author@example.com + GIT_AUTHOR_NAME='A U Thor' + GIT_COMMITTER_EMAIL=committer@example.com + GIT_COMMITTER_NAME='C O Mitter' + export GIT_AUTHOR_EMAIL GIT_AUTHOR_NAME + export GIT_COMMITTER_EMAIL GIT_COMMITTER_NAME + + mkdir aur.git + cd aur.git + + git init -q + git checkout -q -b refs/namespaces/foobar/refs/heads/master + + cat >PKGBUILD <<-EOF + pkgname=foobar + pkgver=1 + pkgrel=1 + pkgdesc='aurweb test package.' + url='https://aur.archlinux.org/' + license=('GPL') + arch=('any') + depends=('python-pygit2') + source=() + md5sums=() + + package() { + echo 'Hello world!' + } + EOF + + cat >.SRCINFO <<-EOF + pkgbase = foobar + pkgdesc = aurweb test package. + pkgver = 1 + pkgrel = 1 + url = https://aur.archlinux.org/ + arch = any + license = GPL + depends = python-pygit2 + + pkgname = foobar + EOF + + git add PKGBUILD .SRCINFO + git commit -q -am 'Initial import' +) diff --git a/git-interface/test/sharness.sh b/git-interface/test/sharness.sh new file mode 100644 index 0000000..1d57ce9 --- /dev/null +++ b/git-interface/test/sharness.sh @@ -0,0 +1,851 @@ +#!/bin/sh +# +# Copyright (c) 2011-2012 Mathias Lafeldt +# Copyright (c) 2005-2012 Git project +# Copyright (c) 2005-2012 Junio C Hamano +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see http://www.gnu.org/licenses/ . + +# Public: Current version of Sharness. +SHARNESS_VERSION="1.0.0" +export SHARNESS_VERSION + +# Public: The file extension for tests. By default, it is set to "t". +: ${SHARNESS_TEST_EXTENSION:=t} +export SHARNESS_TEST_EXTENSION + +# Reset TERM to original terminal if found, otherwise save orignal TERM +[ "x" = "x$SHARNESS_ORIG_TERM" ] && + SHARNESS_ORIG_TERM="$TERM" || + TERM="$SHARNESS_ORIG_TERM" +# Public: The unsanitized TERM under which sharness is originally run +export SHARNESS_ORIG_TERM + +# Export SHELL_PATH +: ${SHELL_PATH:=$SHELL} +export SHELL_PATH + +# For repeatability, reset the environment to a known state. +# TERM is sanitized below, after saving color control sequences. 
+LANG=C +LC_ALL=C +PAGER=cat +TZ=UTC +EDITOR=: +export LANG LC_ALL PAGER TZ EDITOR +unset VISUAL CDPATH GREP_OPTIONS + +# Line feed +LF=' +' + +[ "x$TERM" != "xdumb" ] && ( + [ -t 1 ] && + tput bold >/dev/null 2>&1 && + tput setaf 1 >/dev/null 2>&1 && + tput sgr0 >/dev/null 2>&1 + ) && + color=t + +while test "$#" -ne 0; do + case "$1" in + -d|--d|--de|--deb|--debu|--debug) + debug=t; shift ;; + -i|--i|--im|--imm|--imme|--immed|--immedi|--immedia|--immediat|--immediate) + immediate=t; shift ;; + -l|--l|--lo|--lon|--long|--long-|--long-t|--long-te|--long-tes|--long-test|--long-tests) + TEST_LONG=t; export TEST_LONG; shift ;; + --in|--int|--inte|--inter|--intera|--interac|--interact|--interacti|--interactiv|--interactive|--interactive-|--interactive-t|--interactive-te|--interactive-tes|--interactive-test|--interactive-tests): + TEST_INTERACTIVE=t; export TEST_INTERACTIVE; verbose=t; shift ;; + -h|--h|--he|--hel|--help) + help=t; shift ;; + -v|--v|--ve|--ver|--verb|--verbo|--verbos|--verbose) + verbose=t; shift ;; + -q|--q|--qu|--qui|--quie|--quiet) + # Ignore --quiet under a TAP::Harness. Saying how many tests + # passed without the ok/not ok details is always an error. + test -z "$HARNESS_ACTIVE" && quiet=t; shift ;; + --chain-lint) + chain_lint=t; shift ;; + --no-chain-lint) + chain_lint=; shift ;; + --no-color) + color=; shift ;; + --root=*) + root=$(expr "z$1" : 'z[^=]*=\(.*\)') + shift ;; + *) + echo "error: unknown test option '$1'" >&2; exit 1 ;; + esac +done + +if test -n "$color"; then + # Save the color control sequences now rather than run tput + # each time say_color() is called. This is done for two + # reasons: + # * TERM will be changed to dumb + # * HOME will be changed to a temporary directory and tput + # might need to read ~/.terminfo from the original HOME + # directory to get the control sequences + # Note: This approach assumes the control sequences don't end + # in a newline for any terminal of interest (command + # substitutions strip trailing newlines). Given that most + # (all?) terminals in common use are related to ECMA-48, this + # shouldn't be a problem. + say_color_error=$(tput bold; tput setaf 1) # bold red + say_color_skip=$(tput setaf 4) # blue + say_color_warn=$(tput setaf 3) # brown/yellow + say_color_pass=$(tput setaf 2) # green + say_color_info=$(tput setaf 6) # cyan + say_color_reset=$(tput sgr0) + say_color_="" # no formatting for normal text + say_color() { + test -z "$1" && test -n "$quiet" && return + eval "say_color_color=\$say_color_$1" + shift + printf "%s\\n" "$say_color_color$*$say_color_reset" + } +else + say_color() { + test -z "$1" && test -n "$quiet" && return + shift + printf "%s\n" "$*" + } +fi + +TERM=dumb +export TERM + +error() { + say_color error "error: $*" + EXIT_OK=t + exit 1 +} + +say() { + say_color info "$*" +} + +test -n "$test_description" || error "Test script did not set test_description." + +if test "$help" = "t"; then + echo "$test_description" + exit 0 +fi + +exec 5>&1 +exec 6<&0 +if test "$verbose" = "t"; then + exec 4>&2 3>&1 +else + exec 4>/dev/null 3>/dev/null +fi + +test_failure=0 +test_count=0 +test_fixed=0 +test_broken=0 +test_success=0 + +die() { + code=$? + if test -n "$EXIT_OK"; then + exit $code + else + echo >&5 "FATAL: Unexpected exit with code $code" + exit 1 + fi +} + +EXIT_OK= +trap 'die' EXIT + +# Public: Define that a test prerequisite is available. 
+# +# The prerequisite can later be checked explicitly using test_have_prereq or +# implicitly by specifying the prerequisite name in calls to test_expect_success +# or test_expect_failure. +# +# $1 - Name of prerequiste (a simple word, in all capital letters by convention) +# +# Examples +# +# # Set PYTHON prerequisite if interpreter is available. +# command -v python >/dev/null && test_set_prereq PYTHON +# +# # Set prerequisite depending on some variable. +# test -z "$NO_GETTEXT" && test_set_prereq GETTEXT +# +# Returns nothing. +test_set_prereq() { + satisfied_prereq="$satisfied_prereq$1 " +} +satisfied_prereq=" " + +# Public: Check if one or more test prerequisites are defined. +# +# The prerequisites must have previously been set with test_set_prereq. +# The most common use of this is to skip all the tests if some essential +# prerequisite is missing. +# +# $1 - Comma-separated list of test prerequisites. +# +# Examples +# +# # Skip all remaining tests if prerequisite is not set. +# if ! test_have_prereq PERL; then +# skip_all='skipping perl interface tests, perl not available' +# test_done +# fi +# +# Returns 0 if all prerequisites are defined or 1 otherwise. +test_have_prereq() { + # prerequisites can be concatenated with ',' + save_IFS=$IFS + IFS=, + set -- $* + IFS=$save_IFS + + total_prereq=0 + ok_prereq=0 + missing_prereq= + + for prerequisite; do + case "$prerequisite" in + !*) + negative_prereq=t + prerequisite=${prerequisite#!} + ;; + *) + negative_prereq= + esac + + total_prereq=$(($total_prereq + 1)) + case "$satisfied_prereq" in + *" $prerequisite "*) + satisfied_this_prereq=t + ;; + *) + satisfied_this_prereq= + esac + + case "$satisfied_this_prereq,$negative_prereq" in + t,|,t) + ok_prereq=$(($ok_prereq + 1)) + ;; + *) + # Keep a list of missing prerequisites; restore + # the negative marker if necessary. + prerequisite=${negative_prereq:+!}$prerequisite + if test -z "$missing_prereq"; then + missing_prereq=$prerequisite + else + missing_prereq="$prerequisite,$missing_prereq" + fi + esac + done + + test $total_prereq = $ok_prereq +} + +# You are not expected to call test_ok_ and test_failure_ directly, use +# the text_expect_* functions instead. + +test_ok_() { + test_success=$(($test_success + 1)) + say_color "" "ok $test_count - $@" +} + +test_failure_() { + test_failure=$(($test_failure + 1)) + say_color error "not ok $test_count - $1" + shift + echo "$@" | sed -e 's/^/# /' + test "$immediate" = "" || { EXIT_OK=t; exit 1; } +} + +test_known_broken_ok_() { + test_fixed=$(($test_fixed + 1)) + say_color error "ok $test_count - $@ # TODO known breakage vanished" +} + +test_known_broken_failure_() { + test_broken=$(($test_broken + 1)) + say_color warn "not ok $test_count - $@ # TODO known breakage" +} + +# Public: Execute commands in debug mode. +# +# Takes a single argument and evaluates it only when the test script is started +# with --debug. This is primarily meant for use during the development of test +# scripts. +# +# $1 - Commands to be executed. +# +# Examples +# +# test_debug "cat some_log_file" +# +# Returns the exit code of the last command executed in debug mode or 0 +# otherwise. +test_debug() { + test "$debug" = "" || eval "$1" +} + +# Public: Stop execution and start a shell. +# +# This is useful for debugging tests and only makes sense together with "-v". +# Be sure to remove all invocations of this command before submitting. 
+test_pause() { + if test "$verbose" = t; then + "$SHELL_PATH" <&6 >&3 2>&4 + else + error >&5 "test_pause requires --verbose" + fi +} + +test_eval_() { + # This is a separate function because some tests use + # "return" to end a test_expect_success block early. + case ",$test_prereq," in + *,INTERACTIVE,*) + eval "$*" + ;; + *) + eval </dev/null >&3 2>&4 "$*" + ;; + esac +} + +test_run_() { + test_cleanup=: + expecting_failure=$2 + test_eval_ "$1" + eval_ret=$? + + if test "$chain_lint" = "t"; then + test_eval_ "(exit 117) && $1" + if test "$?" != 117; then + error "bug in the test script: broken &&-chain: $1" + fi + fi + + if test -z "$immediate" || test $eval_ret = 0 || test -n "$expecting_failure"; then + test_eval_ "$test_cleanup" + fi + if test "$verbose" = "t" && test -n "$HARNESS_ACTIVE"; then + echo "" + fi + return "$eval_ret" +} + +test_skip_() { + test_count=$(($test_count + 1)) + to_skip= + for skp in $SKIP_TESTS; do + case $this_test.$test_count in + $skp) + to_skip=t + break + esac + done + if test -z "$to_skip" && test -n "$test_prereq" && ! test_have_prereq "$test_prereq"; then + to_skip=t + fi + case "$to_skip" in + t) + of_prereq= + if test "$missing_prereq" != "$test_prereq"; then + of_prereq=" of $test_prereq" + fi + + say_color skip >&3 "skipping test: $@" + say_color skip "ok $test_count # skip $1 (missing $missing_prereq${of_prereq})" + : true + ;; + *) + false + ;; + esac +} + +# Public: Run test commands and expect them to succeed. +# +# When the test passed, an "ok" message is printed and the number of successful +# tests is incremented. When it failed, a "not ok" message is printed and the +# number of failed tests is incremented. +# +# With --immediate, exit test immediately upon the first failed test. +# +# Usually takes two arguments: +# $1 - Test description +# $2 - Commands to be executed. +# +# With three arguments, the first will be taken to be a prerequisite: +# $1 - Comma-separated list of test prerequisites. The test will be skipped if +# not all of the given prerequisites are set. To negate a prerequisite, +# put a "!" in front of it. +# $2 - Test description +# $3 - Commands to be executed. +# +# Examples +# +# test_expect_success \ +# 'git-write-tree should be able to write an empty tree.' \ +# 'tree=$(git-write-tree)' +# +# # Test depending on one prerequisite. +# test_expect_success TTY 'git --paginate rev-list uses a pager' \ +# ' ... ' +# +# # Multiple prerequisites are separated by a comma. +# test_expect_success PERL,PYTHON 'yo dawg' \ +# ' test $(perl -E 'print eval "1 +" . qx[python -c "print 2"]') == "4" ' +# +# Returns nothing. +test_expect_success() { + test "$#" = 3 && { test_prereq=$1; shift; } || test_prereq= + test "$#" = 2 || error "bug in the test script: not 2 or 3 parameters to test_expect_success" + export test_prereq + if ! test_skip_ "$@"; then + say >&3 "expecting success: $2" + if test_run_ "$2"; then + test_ok_ "$1" + else + test_failure_ "$@" + fi + fi + echo >&3 "" +} + +# Public: Run test commands and expect them to fail. Used to demonstrate a known +# breakage. +# +# This is NOT the opposite of test_expect_success, but rather used to mark a +# test that demonstrates a known breakage. +# +# When the test passed, an "ok" message is printed and the number of fixed tests +# is incremented. When it failed, a "not ok" message is printed and the number +# of tests still broken is incremented. +# +# Failures from these tests won't cause --immediate to stop. 
+# +# Usually takes two arguments: +# $1 - Test description +# $2 - Commands to be executed. +# +# With three arguments, the first will be taken to be a prerequisite: +# $1 - Comma-separated list of test prerequisites. The test will be skipped if +# not all of the given prerequisites are set. To negate a prerequisite, +# put a "!" in front of it. +# $2 - Test description +# $3 - Commands to be executed. +# +# Returns nothing. +test_expect_failure() { + test "$#" = 3 && { test_prereq=$1; shift; } || test_prereq= + test "$#" = 2 || error "bug in the test script: not 2 or 3 parameters to test_expect_failure" + export test_prereq + if ! test_skip_ "$@"; then + say >&3 "checking known breakage: $2" + if test_run_ "$2" expecting_failure; then + test_known_broken_ok_ "$1" + else + test_known_broken_failure_ "$1" + fi + fi + echo >&3 "" +} + +# Public: Run command and ensure that it fails in a controlled way. +# +# Use it instead of "! <command>". For example, when <command> dies due to a +# segfault, test_must_fail diagnoses it as an error, while "! <command>" would +# mistakenly be treated as just another expected failure. +# +# This is one of the prefix functions to be used inside test_expect_success or +# test_expect_failure. +# +# $1.. - Command to be executed. +# +# Examples +# +# test_expect_success 'complain and die' ' +# do something && +# do something else && +# test_must_fail git checkout ../outerspace +# ' +# +# Returns 1 if the command succeeded (exit code 0). +# Returns 1 if the command died by signal (exit codes 130-192) +# Returns 1 if the command could not be found (exit code 127). +# Returns 0 otherwise. +test_must_fail() { + "$@" + exit_code=$? + if test $exit_code = 0; then + echo >&2 "test_must_fail: command succeeded: $*" + return 1 + elif test $exit_code -gt 129 -a $exit_code -le 192; then + echo >&2 "test_must_fail: died by signal: $*" + return 1 + elif test $exit_code = 127; then + echo >&2 "test_must_fail: command not found: $*" + return 1 + fi + return 0 +} + +# Public: Run command and ensure that it succeeds or fails in a controlled way. +# +# Similar to test_must_fail, but tolerates success too. Use it instead of +# "<command> || :" to catch failures caused by a segfault, for instance. +# +# This is one of the prefix functions to be used inside test_expect_success or +# test_expect_failure. +# +# $1.. - Command to be executed. +# +# Examples +# +# test_expect_success 'some command works without configuration' ' +# test_might_fail git config --unset all.configuration && +# do something +# ' +# +# Returns 1 if the command died by signal (exit codes 130-192) +# Returns 1 if the command could not be found (exit code 127). +# Returns 0 otherwise. +test_might_fail() { + "$@" + exit_code=$? + if test $exit_code -gt 129 -a $exit_code -le 192; then + echo >&2 "test_might_fail: died by signal: $*" + return 1 + elif test $exit_code = 127; then + echo >&2 "test_might_fail: command not found: $*" + return 1 + fi + return 0 +} + +# Public: Run command and ensure it exits with a given exit code. +# +# This is one of the prefix functions to be used inside test_expect_success or +# test_expect_failure. +# +# $1 - Expected exit code. +# $2.. - Command to be executed. +# +# Examples +# +# test_expect_success 'Merge with d/f conflicts' ' +# test_expect_code 1 git merge "merge msg" B master +# ' +# +# Returns 0 if the expected exit code is returned or 1 otherwise. +test_expect_code() { + want_code=$1 + shift + "$@" + exit_code=$? 
+	if test $exit_code = $want_code; then
+		return 0
+	fi
+
+	echo >&2 "test_expect_code: command exited with $exit_code, we wanted $want_code $*"
+	return 1
+}
+
+# Public: Compare two files to see if expected output matches actual output.
+#
+# The TEST_CMP variable defines the command used for the comparison; it
+# defaults to "diff -u". Only when the test script was started with --verbose,
+# will the command's output, the diff, be printed to the standard output.
+#
+# This is one of the prefix functions to be used inside test_expect_success or
+# test_expect_failure.
+#
+# $1 - Path to file with expected output.
+# $2 - Path to file with actual output.
+#
+# Examples
+#
+#   test_expect_success 'foo works' '
+#       echo expected >expected &&
+#       foo >actual &&
+#       test_cmp expected actual
+#   '
+#
+# Returns the exit code of the command set by TEST_CMP.
+test_cmp() {
+	${TEST_CMP:-diff -u} "$@"
+}
+
+# Public: portably print a sequence of numbers.
+#
+# seq is not in POSIX and GNU seq might not be available everywhere,
+# so it is nice to have a seq implementation, even a very simple one.
+#
+# $1 - Starting number.
+# $2 - Ending number.
+#
+# Examples
+#
+#   test_expect_success 'foo works 10 times' '
+#       for i in $(test_seq 1 10)
+#       do
+#           foo || return
+#       done
+#   '
+#
+# Returns 0 if all the specified numbers can be displayed.
+test_seq() {
+	i="$1"
+	j="$2"
+	while test "$i" -le "$j"
+	do
+		echo "$i" || return
+		i=$(expr "$i" + 1)
+	done
+}
+
+# Public: Check if the file expected to be empty is indeed empty, and barfs
+# otherwise.
+#
+# $1 - File to check for emptiness.
+#
+# Returns 0 if file is empty, 1 otherwise.
+test_must_be_empty() {
+	if test -s "$1"
+	then
+		echo "'$1' is not empty, it contains:"
+		cat "$1"
+		return 1
+	fi
+}
+
+# Public: Schedule cleanup commands to be run unconditionally at the end of a
+# test.
+#
+# If some cleanup command fails, the test will not pass. With --immediate, no
+# cleanup is done to help diagnose what went wrong.
+#
+# This is one of the prefix functions to be used inside test_expect_success or
+# test_expect_failure.
+#
+# $1.. - Commands to prepend to the list of cleanup commands.
+#
+# Examples
+#
+#   test_expect_success 'test core.capslock' '
+#       git config core.capslock true &&
+#       test_when_finished "git config --unset core.capslock" &&
+#       do_something
+#   '
+#
+# Returns the exit code of the last cleanup command executed.
+test_when_finished() {
+	test_cleanup="{ $*
+		} && (exit \"\$eval_ret\"); eval_ret=\$?; $test_cleanup"
+}
+
+# Public: Schedule cleanup commands to be run unconditionally when all tests
+# have run.
+#
+# This can be used to clean up things like test databases. It is not needed to
+# clean up temporary files, as test_done already does that.
+#
+# Examples:
+#
+#   cleanup mysql -e "DROP DATABASE mytest"
+#
+# Returns the exit code of the last cleanup command executed.
+final_cleanup=
+cleanup() {
+	final_cleanup="{ $*
+		} && (exit \"\$eval_ret\"); eval_ret=\$?; $final_cleanup"
+}
+
+# Public: Summarize test results and exit with an appropriate error code.
+#
+# Must be called at the end of each test script.
+#
+# Can also be used to stop tests early and skip all remaining tests. For this,
+# set skip_all to a string explaining why the tests were skipped before calling
+# test_done.
+#
+# Examples
+#
+#   # Each test script must call test_done at the end.
+#   test_done
+#
+#   # Skip all remaining tests if prerequisite is not set.
+#   if ! test_have_prereq PERL; then
+#       skip_all='skipping perl interface tests, perl not available'
+#       test_done
+#   fi
+#
+# Returns 0 if all tests passed or 1 if there was a failure.
+test_done() {
+	EXIT_OK=t
+
+	if test -z "$HARNESS_ACTIVE"; then
+		test_results_dir="$SHARNESS_TEST_DIRECTORY/test-results"
+		mkdir -p "$test_results_dir"
+		test_results_path="$test_results_dir/$this_test.$$.counts"
+
+		cat >>"$test_results_path" <<-EOF
+		total $test_count
+		success $test_success
+		fixed $test_fixed
+		broken $test_broken
+		failed $test_failure
+
+		EOF
+	fi
+
+	if test "$test_fixed" != 0; then
+		say_color error "# $test_fixed known breakage(s) vanished; please update test(s)"
+	fi
+	if test "$test_broken" != 0; then
+		say_color warn "# still have $test_broken known breakage(s)"
+	fi
+	if test "$test_broken" != 0 || test "$test_fixed" != 0; then
+		test_remaining=$(( $test_count - $test_broken - $test_fixed ))
+		msg="remaining $test_remaining test(s)"
+	else
+		test_remaining=$test_count
+		msg="$test_count test(s)"
+	fi
+
+	case "$test_failure" in
+	0)
+		# Maybe print SKIP message
+		if test -n "$skip_all" && test $test_count -gt 0; then
+			error "Can't use skip_all after running some tests"
+		fi
+		[ -z "$skip_all" ] || skip_all=" # SKIP $skip_all"
+
+		if test $test_remaining -gt 0; then
+			say_color pass "# passed all $msg"
+		fi
+		say "1..$test_count$skip_all"
+
+		test_eval_ "$final_cleanup"
+
+		test -d "$remove_trash" &&
+		cd "$(dirname "$remove_trash")" &&
+		rm -rf "$(basename "$remove_trash")"
+
+		exit 0 ;;
+
+	*)
+		say_color error "# failed $test_failure among $msg"
+		say "1..$test_count"
+
+		exit 1 ;;
+
+	esac
+}
+
+# Public: Root directory containing tests. Tests can override this variable,
+# e.g. for testing Sharness itself.
+: ${SHARNESS_TEST_DIRECTORY:=$(pwd)}
+export SHARNESS_TEST_DIRECTORY
+
+# Public: Source directory of test code and sharness library.
+# This directory may be different from the directory in which tests are
+# being run.
+: ${SHARNESS_TEST_SRCDIR:=$(cd $(dirname $0) && pwd)}
+export SHARNESS_TEST_SRCDIR
+
+# Public: Build directory that will be added to PATH. By default, it is set to
+# the parent directory of SHARNESS_TEST_DIRECTORY.
+: ${SHARNESS_BUILD_DIRECTORY:="$SHARNESS_TEST_DIRECTORY/.."}
+PATH="$SHARNESS_BUILD_DIRECTORY:$PATH"
+export PATH SHARNESS_BUILD_DIRECTORY
+
+# Public: Path to test script currently executed.
+SHARNESS_TEST_FILE="$0"
+export SHARNESS_TEST_FILE
+
+# Prepare test area.
+SHARNESS_TRASH_DIRECTORY="trash directory.$(basename "$SHARNESS_TEST_FILE" ".$SHARNESS_TEST_EXTENSION")"
+test -n "$root" && SHARNESS_TRASH_DIRECTORY="$root/$SHARNESS_TRASH_DIRECTORY"
+case "$SHARNESS_TRASH_DIRECTORY" in
+/*) ;; # absolute path is good
+ *) SHARNESS_TRASH_DIRECTORY="$SHARNESS_TEST_DIRECTORY/$SHARNESS_TRASH_DIRECTORY" ;;
+esac
+test "$debug" = "t" || remove_trash="$SHARNESS_TRASH_DIRECTORY"
+rm -rf "$SHARNESS_TRASH_DIRECTORY" || {
+	EXIT_OK=t
+	echo >&5 "FATAL: Cannot prepare test area"
+	exit 1
+}
+
+
+#
+# Load any extensions in $srcdir/sharness.d/*.sh
+#
+if test -d "${SHARNESS_TEST_SRCDIR}/sharness.d"
+then
+	for file in "${SHARNESS_TEST_SRCDIR}"/sharness.d/*.sh
+	do
+		# Ensure glob was not an empty match:
+		test -e "${file}" || break
+
+		if test -n "$debug"
+		then
+			echo >&5 "sharness: loading extensions from ${file}"
+		fi
+		. "${file}"
+		if test $? != 0
+		then
+			echo >&5 "sharness: Error loading ${file}. Aborting."
+			exit 1
+		fi
+	done
+fi
+
+# Public: Empty trash directory, the test area, provided for each test. The HOME
+# variable is set to that directory too.
+export SHARNESS_TRASH_DIRECTORY
+
+HOME="$SHARNESS_TRASH_DIRECTORY"
+export HOME
+
+mkdir -p "$SHARNESS_TRASH_DIRECTORY" || exit 1
+# Use -P to resolve symlinks in our working directory so that the cwd
+# in subprocesses like git equals our $PWD (for pathname comparisons).
+cd -P "$SHARNESS_TRASH_DIRECTORY" || exit 1
+
+this_test=${SHARNESS_TEST_FILE##*/}
+this_test=${this_test%.$SHARNESS_TEST_EXTENSION}
+for skp in $SKIP_TESTS; do
+	case "$this_test" in
+	$skp)
+		say_color info >&3 "skipping test $this_test altogether"
+		skip_all="skip all tests in $this_test"
+		test_done
+	esac
+done
+
+test -n "$TEST_LONG" && test_set_prereq EXPENSIVE
+test -n "$TEST_INTERACTIVE" && test_set_prereq INTERACTIVE
+
+# Make sure this script ends with code 0
+:
+
+# vi: set ts=4 sw=4 noet :
diff --git a/git-interface/test/t0001-auth.sh b/git-interface/test/t0001-auth.sh
new file mode 100755
index 0000000..50ef510
--- /dev/null
+++ b/git-interface/test/t0001-auth.sh
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+test_description='git-auth tests'
+
+. ./setup.sh
+
+test_expect_success 'Test basic authentication.' '
+	"$GIT_AUTH" "$AUTH_KEYTYPE_USER" "$AUTH_KEYTEXT_USER" >out &&
+	grep -q AUR_USER=user out &&
+	grep -q AUR_PRIVILEGED=0 out
+'
+
+test_expect_success 'Test Trusted User authentication.' '
+	"$GIT_AUTH" "$AUTH_KEYTYPE_TU" "$AUTH_KEYTEXT_TU" >out &&
+	grep -q AUR_USER=tu out &&
+	grep -q AUR_PRIVILEGED=1 out
+'
+
+test_done
diff --git a/git-interface/test/t0002-serve.sh b/git-interface/test/t0002-serve.sh
new file mode 100755
index 0000000..7e17bcb
--- /dev/null
+++ b/git-interface/test/t0002-serve.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+
+test_description='git-serve tests'
+
+. ./setup.sh
+
+test_expect_success 'Test interactive shell.' '
+	"$GIT_SERVE" 2>&1 | grep -q "Interactive shell is disabled."
+'
+
+test_expect_success 'Test help.' '
+	SSH_ORIGINAL_COMMAND=help "$GIT_SERVE" 2>&1 | grep -q "^Commands:$"
+'
+
+test_expect_success 'Test setup-repo and list-repos.' '
+	SSH_ORIGINAL_COMMAND="setup-repo foobar" AUR_USER=user \
+	"$GIT_SERVE" 2>&1 &&
+	cat >expected <<-EOF &&
+	*foobar
+	EOF
+	SSH_ORIGINAL_COMMAND="list-repos" AUR_USER=user \
+	"$GIT_SERVE" 2>&1 >actual &&
+	test_cmp expected actual
+'
+
+test_done
diff --git a/git-interface/test/t0003-update.sh b/git-interface/test/t0003-update.sh
new file mode 100755
index 0000000..0e8962c
--- /dev/null
+++ b/git-interface/test/t0003-update.sh
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+test_description='git-update tests'
+
+. ./setup.sh
+
+test_expect_success 'Test update hook.' '
+	old=0000000000000000000000000000000000000000 &&
+	new=$(git -C aur.git rev-parse HEAD) &&
+	SSH_ORIGINAL_COMMAND="setup-repo foobar" AUR_USER=user "$GIT_SERVE" &&
+	AUR_USER=user AUR_PKGBASE=foobar AUR_PRIVILEGED=0 \
+	"$GIT_UPDATE" refs/heads/master "$old" "$new" 2>&1 &&
+	cat >expected <<-EOF &&
+	1|1|foobar|1-1|aurweb test package.|https://aur.archlinux.org/
+	EOF
+	echo "SELECT * FROM Packages;" | sqlite3 aur.db >actual &&
+	test_cmp expected actual
+'
+
+test_done
-- 
2.9.2
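A note on running the new suite: sharness scripts emit TAP, so they can be run one at a time or through a TAP harness such as prove. The targets of the new git-interface/test/Makefile are not reproduced above, so the commands below are only a sketch of the usual sharness workflow; they assume the scripts are invoked from git-interface/test (each one sources ./setup.sh) and that the Makefile follows the common convention of wrapping prove:

    cd git-interface/test
    ./t0001-auth.sh --verbose     # run a single script and show each command
    ./t0002-serve.sh --immediate  # stop at the first failing assertion
    prove t[0-9]*.sh              # run the whole suite under a TAP harness
    make                          # assumed here to be a thin wrapper around the above

The --verbose and --immediate switches are handled by sharness.sh itself, so they are available to every t*.sh script without any extra code in the tests.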