Compare commits

...

12 commits

Author SHA1 Message Date
Lysann Tranvouez
f63276a253 test resetting local changes 2026-03-09 12:57:23 +01:00
Lysann Tranvouez
e0d0e5c912 check file system and commits upon changes to store 2026-03-09 12:57:23 +01:00
Lysann Tranvouez
d02d4bd68b fix deleting directory
this used to corrupt the local state (password entities remained in DB
but files/dirs were removed from git and disk)
2026-03-09 12:57:23 +01:00
Lysann Tranvouez
4dda6e542b test add, edit, delete 2026-03-09 11:46:26 +01:00
Lysann Tranvouez
ddd69720a1 check notification center notifications 2026-03-09 11:45:04 +01:00
Lysann Tranvouez
60aa4c634a add encrypt-save-decrypt roundtrip test 2026-03-09 11:42:38 +01:00
Lysann Tranvouez
c8da8b17cc more tests: entity fetching + erase 2026-03-09 09:49:43 +01:00
Lysann Tranvouez
ec91c98523 basic core data tests upon clone 2026-03-09 00:17:31 +01:00
Lysann Tranvouez
802ad84d30 include repo as text fixture, no need to clone from actual github 2026-03-08 23:01:38 +01:00
Lysann Tranvouez
d123d627d4 fix test cleanup 2026-03-08 22:05:58 +01:00
Lysann Tranvouez
cf79dd1e14 re-enable existing test 2026-03-08 21:57:27 +01:00
Lysann Tranvouez
520985e93e feature implementation plans 2026-03-08 21:53:17 +01:00
55 changed files with 2660 additions and 22 deletions

View file

@ -114,6 +114,7 @@
5F9D7B0D27AF6F7500A8AB22 /* CryptoTokenKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 5F9D7B0C27AF6F7300A8AB22 /* CryptoTokenKit.framework */; settings = {ATTRIBUTES = (Weak, ); }; };
5F9D7B0E27AF6FCA00A8AB22 /* CryptoTokenKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 5F9D7B0C27AF6F7300A8AB22 /* CryptoTokenKit.framework */; settings = {ATTRIBUTES = (Weak, ); }; };
5F9D7B0F27AF6FD200A8AB22 /* CryptoTokenKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 5F9D7B0C27AF6F7300A8AB22 /* CryptoTokenKit.framework */; settings = {ATTRIBUTES = (Weak, ); }; };
8AD8EBF32F5E2723007475AB /* Fixtures in Resources */ = {isa = PBXBuildFile; fileRef = 8AD8EBF22F5E268D007475AB /* Fixtures */; };
9A1D1CE526E5D1CE0052028E /* OneTimePassword in Frameworks */ = {isa = PBXBuildFile; productRef = 9A1D1CE426E5D1CE0052028E /* OneTimePassword */; };
9A1D1CE726E5D2230052028E /* OneTimePassword in Frameworks */ = {isa = PBXBuildFile; productRef = 9A1D1CE626E5D2230052028E /* OneTimePassword */; };
9A1F47FA26E5CF4B000C0E01 /* OneTimePassword in Frameworks */ = {isa = PBXBuildFile; productRef = 9A1F47F926E5CF4B000C0E01 /* OneTimePassword */; };
@ -422,6 +423,7 @@
30F6C1B327664C7200BE5AB2 /* SVProgressHUD.xcframework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcframework; name = SVProgressHUD.xcframework; path = Carthage/Build/SVProgressHUD.xcframework; sourceTree = "<group>"; };
30FD2F77214D9E0E005E0A92 /* ParserTest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ParserTest.swift; sourceTree = "<group>"; };
5F9D7B0C27AF6F7300A8AB22 /* CryptoTokenKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CryptoTokenKit.framework; path = System/Library/Frameworks/CryptoTokenKit.framework; sourceTree = SDKROOT; };
8AD8EBF22F5E268D007475AB /* Fixtures */ = {isa = PBXFileReference; lastKnownFileType = folder; path = Fixtures; sourceTree = "<group>"; };
9A1EF0B324C50DD80074FEAC /* passBeta.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = passBeta.entitlements; sourceTree = "<group>"; };
9A1EF0B424C50E780074FEAC /* passBetaAutoFillExtension.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = passBetaAutoFillExtension.entitlements; sourceTree = "<group>"; };
9A1EF0B524C50EE00074FEAC /* passBetaExtension.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = passBetaExtension.entitlements; sourceTree = "<group>"; };
@ -883,6 +885,7 @@
30A86F93230F235800F821A4 /* Crypto */,
30BAC8C322E3BA4300438475 /* Testbase */,
30697C5521F63F870064FCAC /* Extensions */,
8AD8EBF22F5E268D007475AB /* Fixtures */,
301F6464216164670071A4CE /* Helpers */,
30C015A7214ED378005BB6DF /* Models */,
30C015A6214ED32A005BB6DF /* Parser */,
@ -1427,6 +1430,7 @@
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
8AD8EBF32F5E2723007475AB /* Fixtures in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};

View file

@ -73,6 +73,7 @@
"KeyImportError." = "Cannot import the key.";
"FileNotFoundError." = "File '%@' cannot be read.";
"PasswordDuplicatedError." = "Cannot add the password; password is duplicated.";
"CannotDeleteDirectoryError." = "Cannot delete directories; delete passwords instead.";
"GitResetError." = "Cannot identify the latest synced commit.";
"GitCreateSignatureError." = "Cannot create a valid author/committer signature.";
"GitPushNotSuccessfulError." = "Pushing local changes was not successful. Make sure there are no uncommitted changes on the remote repository.";

View file

@ -34,6 +34,10 @@ public class PGPAgent {
pgpInterface = nil
}
/// Whether the agent is ready for use: true once `pgpInterface` is non-nil.
/// Does not trigger initialization itself (contrast with `getKeyID()`, which
/// calls `checkAndInit()` first).
public func isInitialized() -> Bool {
pgpInterface != nil
}
public func getKeyID() throws -> [String] {
try checkAndInit()
return pgpInterface?.keyID ?? []

View file

@ -15,6 +15,7 @@ public enum AppError: Error, Equatable {
case keyImport
case readingFile(fileName: String)
case passwordDuplicated
case cannotDeleteDirectory
case gitReset
case gitCommit
case gitCreateSignature

View file

@ -273,6 +273,10 @@ public class PasswordStore {
}
public func delete(passwordEntity: PasswordEntity) throws {
if passwordEntity.isDir {
throw AppError.cannotDeleteDirectory
}
let deletedFileURL = passwordEntity.fileURL(in: storeURL)
let deletedFilePath = passwordEntity.path
try gitRm(path: passwordEntity.path)
@ -320,11 +324,11 @@ public class PasswordStore {
saveUpdatedContext()
}
public func saveUpdatedContext() {
private func saveUpdatedContext() {
PersistenceController.shared.save()
}
public func deleteCoreData() {
private func deleteCoreData() {
PasswordEntity.deleteAll(in: context)
PersistenceController.shared.save()
}

View file

@ -0,0 +1 @@
ref: refs/heads/main

View file

@ -0,0 +1,6 @@
[core]
repositoryformatversion = 0
filemode = true
bare = true
ignorecase = true
precomposeunicode = true

View file

@ -0,0 +1 @@
Unnamed repository; edit this file 'description' to name the repository.

View file

@ -0,0 +1,15 @@
#!/bin/sh
#
# An example hook script to check the commit log message taken by
# applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit. The hook is
# allowed to edit the commit message file.
#
# To enable this hook, rename this file to "applypatch-msg".
# Load git's portable shell helper library (provides die etc.).
. git-sh-setup
# Delegate to the commit-msg hook, if one exists and is executable,
# so both hooks enforce the same message rules.
commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
# No executable commit-msg hook: ":" is a no-op that exits 0 (accept).
:

View file

@ -0,0 +1,24 @@
#!/bin/sh
#
# An example hook script to check the commit log message.
# Called by "git commit" with one argument, the name of the file
# that has the commit message. The hook should exit with non-zero
# status after issuing an appropriate message if it wants to stop the
# commit. The hook is allowed to edit the commit message file.
#
# To enable this hook, rename this file to "commit-msg".
# Uncomment the below to add a Signed-off-by line to the message.
# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
# hook is more suited to it.
#
# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
# This example catches duplicate Signed-off-by lines.
# uniq -c prefixes each distinct Signed-off-by line with its count;
# sed deletes the count-1 entries, so any surviving output means at
# least one line appeared more than once — reject the commit then.
test "" = "$(grep '^Signed-off-by: ' "$1" |
sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
echo >&2 Duplicate Signed-off-by lines.
exit 1
}

View file

@ -0,0 +1,174 @@
#!/usr/bin/perl
use strict;
use warnings;
use IPC::Open2;
# An example hook script to integrate Watchman
# (https://facebook.github.io/watchman/) with git to speed up detecting
# new and modified files.
#
# The hook is passed a version (currently 2) and last update token
# formatted as a string and outputs to stdout a new update token and
# all files that have been modified since the update token. Paths must
# be relative to the root of the working tree and separated by a single NUL.
#
# To enable this hook, rename this file to "query-watchman" and set
# 'git config core.fsmonitor .git/hooks/query-watchman'
#
my ($version, $last_update_token) = @ARGV;
# Uncomment for debugging
# print STDERR "$0 $version $last_update_token\n";
# Check the hook interface version
if ($version ne 2) {
die "Unsupported query-fsmonitor hook version '$version'.\n" .
"Falling back to scanning...\n";
}
# Watchman queries are expressed relative to the working tree root.
my $git_work_tree = get_working_dir();
# Allow exactly one retry after establishing a fresh watch (see
# is_work_tree_watched below).
my $retry = 1;
# Prefer the fast JSON::XS parser; fall back to pure-perl JSON::PP.
my $json_pkg;
eval {
require JSON::XS;
$json_pkg = "JSON::XS";
1;
} or do {
require JSON::PP;
$json_pkg = "JSON::PP";
};
launch_watchman();
sub launch_watchman {
my $o = watchman_query();
if (is_work_tree_watched($o)) {
output_result($o->{clock}, @{$o->{files}});
}
}
# Emit the new update token followed by NUL-separated file names.
sub output_result {
my ($clockid, @files) = @_;
# Uncomment for debugging watchman output
# open (my $fh, ">", ".git/watchman-output.out");
# binmode $fh, ":utf8";
# print $fh "$clockid\n@files\n";
# close $fh;
binmode STDOUT, ":utf8";
print $clockid;
print "\0";
local $, = "\0";
print @files;
}
sub watchman_clock {
my $response = qx/watchman clock "$git_work_tree"/;
die "Failed to get clock id on '$git_work_tree'.\n" .
"Falling back to scanning...\n" if $? != 0;
return $json_pkg->new->utf8->decode($response);
}
sub watchman_query {
my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty')
or die "open2() failed: $!\n" .
"Falling back to scanning...\n";
# In the query expression below we're asking for names of files that
# changed since $last_update_token but not from the .git folder.
#
# To accomplish this, we're using the "since" generator to use the
# recency index to select candidate nodes and "fields" to limit the
# output to file names only. Then we're using the "expression" term to
# further constrain the results.
my $last_update_line = "";
if (substr($last_update_token, 0, 1) eq "c") {
$last_update_token = "\"$last_update_token\"";
$last_update_line = qq[\n"since": $last_update_token,];
}
my $query = <<" END";
["query", "$git_work_tree", {$last_update_line
"fields": ["name"],
"expression": ["not", ["dirname", ".git"]]
}]
END
# Uncomment for debugging the watchman query
# open (my $fh, ">", ".git/watchman-query.json");
# print $fh $query;
# close $fh;
print CHLD_IN $query;
close CHLD_IN;
my $response = do {local $/; <CHLD_OUT>};
# Uncomment for debugging the watch response
# open ($fh, ">", ".git/watchman-response.json");
# print $fh $response;
# close $fh;
die "Watchman: command returned no output.\n" .
"Falling back to scanning...\n" if $response eq "";
die "Watchman: command returned invalid output: $response\n" .
"Falling back to scanning...\n" unless $response =~ /^\{/;
return $json_pkg->new->utf8->decode($response);
}
sub is_work_tree_watched {
my ($output) = @_;
my $error = $output->{error};
# If the tree is not yet watched, start a watch and retry once.
if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) {
$retry--;
my $response = qx/watchman watch "$git_work_tree"/;
die "Failed to make watchman watch '$git_work_tree'.\n" .
"Falling back to scanning...\n" if $? != 0;
$output = $json_pkg->new->utf8->decode($response);
$error = $output->{error};
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
# Uncomment for debugging watchman output
# open (my $fh, ">", ".git/watchman-output.out");
# close $fh;
# Watchman will always return all files on the first query so
# return the fast "everything is dirty" flag to git and do the
# Watchman query just to get it over with now so we won't pay
# the cost in git to look up each individual file.
my $o = watchman_clock();
$error = $output->{error};
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
output_result($o->{clock}, ("/"));
$last_update_token = $o->{clock};
eval { launch_watchman() };
return 0;
}
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
return 1;
}
# Resolve the current directory, normalizing backslashes on Windows.
sub get_working_dir {
my $working_dir;
if ($^O =~ 'msys' || $^O =~ 'cygwin') {
$working_dir = Win32::GetCwd();
$working_dir =~ tr/\\/\//;
} else {
require Cwd;
$working_dir = Cwd::cwd();
}
return $working_dir;
}

View file

@ -0,0 +1,8 @@
#!/bin/sh
#
# An example hook script to prepare a packed repository for use over
# dumb transports.
#
# To enable this hook, rename this file to "post-update".
# Regenerates info/refs (and objects/info/packs) so clients fetching over
# dumb protocols (plain HTTP) can discover refs without a smart server.
exec git update-server-info

View file

@ -0,0 +1,14 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed
# by applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-applypatch".
# Load git's portable shell helper library (provides die etc.).
. git-sh-setup
# Delegate to the pre-commit hook, if present and executable, so
# applied patches pass the same checks as normal commits.
precommit="$(git rev-parse --git-path hooks/pre-commit)"
test -x "$precommit" && exec "$precommit" ${1+"$@"}
# No executable pre-commit hook: ":" is a no-op that exits 0 (accept).
:

View file

@ -0,0 +1,49 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed.
# Called by "git commit" with no arguments. The hook should
# exit with non-zero status after issuing an appropriate message if
# it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-commit".
# Pick the baseline to diff against: HEAD when it exists, otherwise the
# empty tree object (very first commit in the repository).
if git rev-parse --verify HEAD >/dev/null 2>&1
then
against=HEAD
else
# Initial commit: diff against an empty tree object
against=$(git hash-object -t tree /dev/null)
fi
# If you want to allow non-ASCII filenames set this variable to true.
allownonascii=$(git config --type=bool hooks.allownonascii)
# Redirect output to stderr.
exec 1>&2
# Cross platform projects tend to avoid non-ASCII filenames; prevent
# them from being added to the repository. We exploit the fact that the
# printable range starts at the space character and ends with tilde.
if [ "$allownonascii" != "true" ] &&
# Note that the use of brackets around a tr range is ok here, (it's
# even required, for portability to Solaris 10's /usr/bin/tr), since
# the square bracket bytes happen to fall in the designated range.
test $(git diff --cached --name-only --diff-filter=A -z $against |
LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0
then
cat <<\EOF
Error: Attempt to add a non-ASCII file name.
This can cause problems if you want to work with people on other platforms.
To be portable it is advisable to rename the file.
If you know what you are doing you can disable this check using:
git config hooks.allownonascii true
EOF
exit 1
fi
# If there are whitespace errors, print the offending file names and fail.
exec git diff-index --check --cached $against --

View file

@ -0,0 +1,13 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed.
# Called by "git merge" with no arguments. The hook should
# exit with non-zero status after issuing an appropriate message to
# stderr if it wants to stop the merge commit.
#
# To enable this hook, rename this file to "pre-merge-commit".
# Load git's portable shell helper library (provides die etc.).
. git-sh-setup
# Reuse the pre-commit hook for merge commits, when it is executable.
test -x "$GIT_DIR/hooks/pre-commit" &&
exec "$GIT_DIR/hooks/pre-commit"
# No executable pre-commit hook: ":" is a no-op that exits 0 (accept).
:

View file

@ -0,0 +1,53 @@
#!/bin/sh
# An example hook script to verify what is about to be pushed. Called by "git
# push" after it has checked the remote status, but before anything has been
# pushed. If this script exits with a non-zero status nothing will be pushed.
#
# This hook is called with the following parameters:
#
# $1 -- Name of the remote to which the push is being done
# $2 -- URL to which the push is being done
#
# If pushing without using a named remote those arguments will be equal.
#
# Information about the commits which are being pushed is supplied as lines to
# the standard input in the form:
#
# <local ref> <local oid> <remote ref> <remote oid>
#
# This sample shows how to prevent push of commits where the log message starts
# with "WIP" (work in progress).
remote="$1"
url="$2"
# The all-zero object id; derived via hash-object so its length matches the
# repository's hash algorithm (SHA-1 or SHA-256).
zero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')
# One line per ref being pushed, read from standard input.
while read local_ref local_oid remote_ref remote_oid
do
if test "$local_oid" = "$zero"
then
# Handle delete
:
else
if test "$remote_oid" = "$zero"
then
# New branch, examine all commits
range="$local_oid"
else
# Update to existing branch, examine new commits
range="$remote_oid..$local_oid"
fi
# Check for WIP commit
commit=$(git rev-list -n 1 --grep '^WIP' "$range")
if test -n "$commit"
then
echo >&2 "Found WIP commit in $local_ref, not pushing"
exit 1
fi
fi
done
exit 0

View file

@ -0,0 +1,169 @@
#!/bin/sh
#
# Copyright (c) 2006, 2008 Junio C Hamano
#
# The "pre-rebase" hook is run just before "git rebase" starts doing
# its job, and can prevent the command from running by exiting with
# non-zero status.
#
# The hook is called with the following parameters:
#
# $1 -- the upstream the series was forked from.
# $2 -- the branch being rebased (or empty when rebasing the current branch).
#
# This sample shows how to prevent topic branches that are already
# merged to 'next' branch from getting rebased, because allowing it
# would result in rebasing already published history.
publish=next
basebranch="$1"
# With two arguments the second names the branch; otherwise rebase the
# currently checked-out branch (and never interfere with detached HEAD).
if test "$#" = 2
then
topic="refs/heads/$2"
else
topic=`git symbolic-ref HEAD` ||
exit 0 ;# we do not interrupt rebasing detached HEAD
fi
# Only guard topic branches named like "??/name"; let others through.
case "$topic" in
refs/heads/??/*)
;;
*)
exit 0 ;# we do not interrupt others.
;;
esac
# Now we are dealing with a topic branch being rebased
# on top of master. Is it OK to rebase it?
# Does the topic really exist?
git show-ref -q "$topic" || {
echo >&2 "No such branch $topic"
exit 1
}
# Is topic fully merged to master?
not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
if test -z "$not_in_master"
then
echo >&2 "$topic is fully merged to master; better remove it."
exit 1 ;# we could allow it, but there is no point.
fi
# Is topic ever merged to next? If so you should not be rebasing it.
only_next_1=`git rev-list ^master "^$topic" ${publish} | sort`
only_next_2=`git rev-list ^master ${publish} | sort`
if test "$only_next_1" = "$only_next_2"
then
not_in_topic=`git rev-list "^$topic" master`
if test -z "$not_in_topic"
then
echo >&2 "$topic is already up to date with master"
exit 1 ;# we could allow it, but there is no point.
else
exit 0
fi
else
# Some commits of the topic are already in "next": list them on stderr
# (via the inline perl below) and refuse the rebase.
not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
/usr/bin/perl -e '
my $topic = $ARGV[0];
my $msg = "* $topic has commits already merged to public branch:\n";
my (%not_in_next) = map {
/^([0-9a-f]+) /;
($1 => 1);
} split(/\n/, $ARGV[1]);
for my $elem (map {
/^([0-9a-f]+) (.*)$/;
[$1 => $2];
} split(/\n/, $ARGV[2])) {
if (!exists $not_in_next{$elem->[0]}) {
if ($msg) {
print STDERR $msg;
undef $msg;
}
print STDERR " $elem->[1]\n";
}
}
' "$topic" "$not_in_next" "$not_in_master"
exit 1
fi
# The here-document below is documentation only; an unassigned here-doc
# with no consuming command is never executed.
<<\DOC_END
This sample hook safeguards topic branches that have been
published from being rewound.
The workflow assumed here is:
* Once a topic branch forks from "master", "master" is never
merged into it again (either directly or indirectly).
* Once a topic branch is fully cooked and merged into "master",
it is deleted. If you need to build on top of it to correct
earlier mistakes, a new topic branch is created by forking at
the tip of the "master". This is not strictly necessary, but
it makes it easier to keep your history simple.
* Whenever you need to test or publish your changes to topic
branches, merge them into "next" branch.
The script, being an example, hardcodes the publish branch name
to be "next", but it is trivial to make it configurable via
$GIT_DIR/config mechanism.
With this workflow, you would want to know:
(1) ... if a topic branch has ever been merged to "next". Young
topic branches can have stupid mistakes you would rather
clean up before publishing, and things that have not been
merged into other branches can be easily rebased without
affecting other people. But once it is published, you would
not want to rewind it.
(2) ... if a topic branch has been fully merged to "master".
Then you can delete it. More importantly, you should not
build on top of it -- other people may already want to
change things related to the topic as patches against your
"master", so if you need further changes, it is better to
fork the topic (perhaps with the same name) afresh from the
tip of "master".
Let's look at this example:
o---o---o---o---o---o---o---o---o---o "next"
/ / / /
/ a---a---b A / /
/ / / /
/ / c---c---c---c B /
/ / / \ /
/ / / b---b C \ /
/ / / / \ /
---o---o---o---o---o---o---o---o---o---o---o "master"
A, B and C are topic branches.
* A has one fix since it was merged up to "next".
* B has finished. It has been fully merged up to "master" and "next",
and is ready to be deleted.
* C has not merged to "next" at all.
We would want to allow C to be rebased, refuse A, and encourage
B to be deleted.
To compute (1):
git rev-list ^master ^topic next
git rev-list ^master next
if these match, topic has not merged in next at all.
To compute (2):
git rev-list master..topic
if this is empty, it is fully merged to "master".
DOC_END

View file

@ -0,0 +1,24 @@
#!/bin/sh
#
# An example hook script to make use of push options.
# The example simply echoes all push options that start with 'echoback='
# and rejects all pushes when the "reject" push option is used.
#
# To enable this hook, rename this file to "pre-receive".
# Push options arrive as GIT_PUSH_OPTION_0..GIT_PUSH_OPTION_(COUNT-1);
# iterate them by index via eval.
if test -n "$GIT_PUSH_OPTION_COUNT"
then
i=0
while test "$i" -lt "$GIT_PUSH_OPTION_COUNT"
do
eval "value=\$GIT_PUSH_OPTION_$i"
case "$value" in
echoback=*)
# Send the text after "echoback=" back to the pusher on stderr.
echo "echo from the pre-receive-hook: ${value#*=}" >&2
;;
reject)
exit 1
esac
i=$((i + 1))
done
fi

View file

@ -0,0 +1,42 @@
#!/bin/sh
#
# An example hook script to prepare the commit log message.
# Called by "git commit" with the name of the file that has the
# commit message, followed by the description of the commit
# message's source. The hook's purpose is to edit the commit
# message file. If the hook fails with a non-zero status,
# the commit is aborted.
#
# To enable this hook, rename this file to "prepare-commit-msg".
# This hook includes three examples. The first one removes the
# "# Please enter the commit message..." help message.
#
# The second includes the output of "git diff --name-status -r"
# into the message, just before the "git status" output. It is
# commented because it doesn't cope with --amend or with squashed
# commits.
#
# The third example adds a Signed-off-by line to the message, that can
# still be edited. This is rarely a good idea.
COMMIT_MSG_FILE=$1
COMMIT_SOURCE=$2
SHA1=$3
# Strip the commented help block in place (-i.bak keeps a backup); the
# flip-flop range runs from the "Please enter..." line to the next "#" line.
/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE"
# case "$COMMIT_SOURCE,$SHA1" in
# ,|template,)
# /usr/bin/perl -i.bak -pe '
# print "\n" . `git diff --cached --name-status -r`
# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;;
# *) ;;
# esac
# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE"
# if test -z "$COMMIT_SOURCE"
# then
# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE"
# fi

View file

@ -0,0 +1,78 @@
#!/bin/sh
# An example hook script to update a checked-out tree on a git push.
#
# This hook is invoked by git-receive-pack(1) when it reacts to git
# push and updates reference(s) in its repository, and when the push
# tries to update the branch that is currently checked out and the
# receive.denyCurrentBranch configuration variable is set to
# updateInstead.
#
# By default, such a push is refused if the working tree and the index
# of the remote repository has any difference from the currently
# checked out commit; when both the working tree and the index match
# the current commit, they are updated to match the newly pushed tip
# of the branch. This hook is to be used to override the default
# behaviour; however the code below reimplements the default behaviour
# as a starting point for convenient modification.
#
# The hook receives the commit with which the tip of the current
# branch is going to be updated:
commit=$1
# It can exit with a non-zero status to refuse the push (when it does
# so, it must not modify the index or the working tree).
die () {
echo >&2 "$*"
exit 1
}
# Or it can make any necessary changes to the working tree and to the
# index to bring them to the desired state when the tip of the current
# branch is updated to the new commit, and exit with a zero status.
#
# For example, the hook can simply run git read-tree -u -m HEAD "$1"
# in order to emulate git fetch that is run in the reverse direction
# with git push, as the two-tree form of git read-tree -u -m is
# essentially the same as git switch or git checkout that switches
# branches while keeping the local changes in the working tree that do
# not interfere with the difference between the branches.
# The below is a more-or-less exact translation to shell of the C code
# for the default behaviour for git's push-to-checkout hook defined in
# the push_to_deploy() function in builtin/receive-pack.c.
#
# Note that the hook will be executed from the repository directory,
# not from the working tree, so if you want to perform operations on
# the working tree, you will have to adapt your code accordingly, e.g.
# by adding "cd .." or using relative paths.
if ! git update-index -q --ignore-submodules --refresh
then
die "Up-to-date check failed"
fi
if ! git diff-files --quiet --ignore-submodules --
then
die "Working directory has unstaged changes"
fi
# This is a rough translation of:
#
# head_has_history() ? "HEAD" : EMPTY_TREE_SHA1_HEX
if git cat-file -e HEAD 2>/dev/null
then
head=HEAD
else
head=$(git hash-object -t tree --stdin </dev/null)
fi
if ! git diff-index --quiet --cached --ignore-submodules $head --
then
die "Working directory has staged changes"
fi
# All clean: fast-forward the index and working tree to the pushed commit.
if ! git read-tree -u -m "$commit"
then
die "Could not update working tree to new HEAD"
fi

View file

@ -0,0 +1,128 @@
#!/bin/sh
#
# An example hook script to block unannotated tags from entering.
# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
#
# To enable this hook, rename this file to "update".
#
# Config
# ------
# hooks.allowunannotated
# This boolean sets whether unannotated tags will be allowed into the
# repository. By default they won't be.
# hooks.allowdeletetag
# This boolean sets whether deleting tags will be allowed in the
# repository. By default they won't be.
# hooks.allowmodifytag
# This boolean sets whether a tag may be modified after creation. By default
# it won't be.
# hooks.allowdeletebranch
# This boolean sets whether deleting branches will be allowed in the
# repository. By default they won't be.
# hooks.denycreatebranch
# This boolean sets whether remotely creating branches will be denied
# in the repository. By default this is allowed.
#
# --- Command line
refname="$1"
oldrev="$2"
newrev="$3"
# --- Safety check
if [ -z "$GIT_DIR" ]; then
echo "Don't run this script from the command line." >&2
echo " (if you want, you could supply GIT_DIR then run" >&2
echo " $0 <ref> <oldrev> <newrev>)" >&2
exit 1
fi
if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
echo "usage: $0 <ref> <oldrev> <newrev>" >&2
exit 1
fi
# --- Config
allowunannotated=$(git config --type=bool hooks.allowunannotated)
allowdeletebranch=$(git config --type=bool hooks.allowdeletebranch)
denycreatebranch=$(git config --type=bool hooks.denycreatebranch)
allowdeletetag=$(git config --type=bool hooks.allowdeletetag)
allowmodifytag=$(git config --type=bool hooks.allowmodifytag)
# check for no description
projectdesc=$(sed -e '1q' "$GIT_DIR/description")
case "$projectdesc" in
"Unnamed repository"* | "")
echo "*** Project description file hasn't been set" >&2
exit 1
;;
esac
# --- Check types
# if $newrev is 0000...0000, it's a commit to delete a ref.
zero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')
if [ "$newrev" = "$zero" ]; then
newrev_type=delete
else
newrev_type=$(git cat-file -t $newrev)
fi
# Dispatch on (ref namespace, new object type) and enforce the policy
# configured above for each kind of update.
case "$refname","$newrev_type" in
refs/tags/*,commit)
# un-annotated tag
short_refname=${refname##refs/tags/}
if [ "$allowunannotated" != "true" ]; then
echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
exit 1
fi
;;
refs/tags/*,delete)
# delete tag
if [ "$allowdeletetag" != "true" ]; then
echo "*** Deleting a tag is not allowed in this repository" >&2
exit 1
fi
;;
refs/tags/*,tag)
# annotated tag
if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
then
echo "*** Tag '$refname' already exists." >&2
echo "*** Modifying a tag is not allowed in this repository." >&2
exit 1
fi
;;
refs/heads/*,commit)
# branch
if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
echo "*** Creating a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/heads/*,delete)
# delete branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/remotes/*,commit)
# tracking branch
;;
refs/remotes/*,delete)
# delete tracking branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a tracking branch is not allowed in this repository" >&2
exit 1
fi
;;
*)
# Anything else (is there anything else?)
echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
exit 1
;;
esac
# --- Finished
exit 0

View file

@ -0,0 +1,6 @@
# git ls-files --others --exclude-from=.git/info/exclude
# Lines that start with '#' are comments.
# For a project mostly in C, the following would be a good set of
# exclude patterns (uncomment them if you want to use them):
# *.[oa]
# *~

View file

@ -0,0 +1,3 @@
x•ÍM
Â0@a×9Åì™ü<E284A2>¤ â\z<>i:Å@zzKÕ¸}ï¥ež³€îôN*3Sôv¤48J=ÙÞEoØŽÎDÒˆÈk"EMnK…ËóN¥ÀµRy,<2C>_pœ¶r_9p;<3B>Á¢‹ˆìqETÚ¾Âÿ
&ú¯ i¥ÞÃ

View file

@ -0,0 +1,2 @@
# pack-refs with: peeled fully-peeled sorted
f095bb4897e4cd58faadfe4d4f678fb697be3ffd refs/heads/main

View file

@ -0,0 +1 @@
925eb0f6b19282b5f10dfe008e0062b4be6dd41a not-for-merge branch 'master' of https://github.com/mssun/passforios-password-store

View file

@ -0,0 +1 @@
ref: refs/heads/master

View file

@ -0,0 +1,9 @@
[core]
repositoryformatversion = 0
filemode = true
bare = true
ignorecase = true
precomposeunicode = true
[remote "origin"]
url = https://github.com/mssun/passforios-password-store.git
fetch = +refs/heads/*:refs/remotes/origin/*

View file

@ -0,0 +1 @@
Example password store repository for passforios tests with .gpg-id files.

View file

@ -0,0 +1,15 @@
#!/bin/sh
#
# An example hook script to check the commit log message taken by
# applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit. The hook is
# allowed to edit the commit message file.
#
# To enable this hook, rename this file to "applypatch-msg".
# Load git's portable shell helper library (provides die etc.).
. git-sh-setup
# Delegate to the commit-msg hook, if one exists and is executable,
# so both hooks enforce the same message rules.
commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
# No executable commit-msg hook: ":" is a no-op that exits 0 (accept).
:

View file

@ -0,0 +1,24 @@
#!/bin/sh
#
# An example hook script to check the commit log message.
# Called by "git commit" with one argument, the name of the file
# that has the commit message. The hook should exit with non-zero
# status after issuing an appropriate message if it wants to stop the
# commit. The hook is allowed to edit the commit message file.
#
# To enable this hook, rename this file to "commit-msg".
# Uncomment the below to add a Signed-off-by line to the message.
# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
# hook is more suited to it.
#
# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
# This example catches duplicate Signed-off-by lines.
test "" = "$(grep '^Signed-off-by: ' "$1" |
sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
echo >&2 Duplicate Signed-off-by lines.
exit 1
}

View file

@ -0,0 +1,174 @@
#!/usr/bin/perl
use strict;
use warnings;
use IPC::Open2;
# An example hook script to integrate Watchman
# (https://facebook.github.io/watchman/) with git to speed up detecting
# new and modified files.
#
# The hook is passed a version (currently 2) and last update token
# formatted as a string and outputs to stdout a new update token and
# all files that have been modified since the update token. Paths must
# be relative to the root of the working tree and separated by a single NUL.
#
# To enable this hook, rename this file to "query-watchman" and set
# 'git config core.fsmonitor .git/hooks/query-watchman'
#
my ($version, $last_update_token) = @ARGV;
# Uncomment for debugging
# print STDERR "$0 $version $last_update_token\n";
# Check the hook interface version
if ($version ne 2) {
die "Unsupported query-fsmonitor hook version '$version'.\n" .
"Falling back to scanning...\n";
}
my $git_work_tree = get_working_dir();
my $retry = 1;
my $json_pkg;
eval {
require JSON::XS;
$json_pkg = "JSON::XS";
1;
} or do {
require JSON::PP;
$json_pkg = "JSON::PP";
};
launch_watchman();
sub launch_watchman {
my $o = watchman_query();
if (is_work_tree_watched($o)) {
output_result($o->{clock}, @{$o->{files}});
}
}
sub output_result {
my ($clockid, @files) = @_;
# Uncomment for debugging watchman output
# open (my $fh, ">", ".git/watchman-output.out");
# binmode $fh, ":utf8";
# print $fh "$clockid\n@files\n";
# close $fh;
binmode STDOUT, ":utf8";
print $clockid;
print "\0";
local $, = "\0";
print @files;
}
sub watchman_clock {
my $response = qx/watchman clock "$git_work_tree"/;
die "Failed to get clock id on '$git_work_tree'.\n" .
"Falling back to scanning...\n" if $? != 0;
return $json_pkg->new->utf8->decode($response);
}
sub watchman_query {
my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty')
or die "open2() failed: $!\n" .
"Falling back to scanning...\n";
# In the query expression below we're asking for names of files that
# changed since $last_update_token but not from the .git folder.
#
# To accomplish this, we're using the "since" generator to use the
# recency index to select candidate nodes and "fields" to limit the
# output to file names only. Then we're using the "expression" term to
# further constrain the results.
my $last_update_line = "";
if (substr($last_update_token, 0, 1) eq "c") {
$last_update_token = "\"$last_update_token\"";
$last_update_line = qq[\n"since": $last_update_token,];
}
my $query = <<" END";
["query", "$git_work_tree", {$last_update_line
"fields": ["name"],
"expression": ["not", ["dirname", ".git"]]
}]
END
# Uncomment for debugging the watchman query
# open (my $fh, ">", ".git/watchman-query.json");
# print $fh $query;
# close $fh;
print CHLD_IN $query;
close CHLD_IN;
my $response = do {local $/; <CHLD_OUT>};
# Uncomment for debugging the watch response
# open ($fh, ">", ".git/watchman-response.json");
# print $fh $response;
# close $fh;
die "Watchman: command returned no output.\n" .
"Falling back to scanning...\n" if $response eq "";
die "Watchman: command returned invalid output: $response\n" .
"Falling back to scanning...\n" unless $response =~ /^\{/;
return $json_pkg->new->utf8->decode($response);
}
sub is_work_tree_watched {
my ($output) = @_;
my $error = $output->{error};
if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) {
$retry--;
my $response = qx/watchman watch "$git_work_tree"/;
die "Failed to make watchman watch '$git_work_tree'.\n" .
"Falling back to scanning...\n" if $? != 0;
$output = $json_pkg->new->utf8->decode($response);
$error = $output->{error};
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
# Uncomment for debugging watchman output
# open (my $fh, ">", ".git/watchman-output.out");
# close $fh;
# Watchman will always return all files on the first query so
# return the fast "everything is dirty" flag to git and do the
# Watchman query just to get it over with now so we won't pay
# the cost in git to look up each individual file.
my $o = watchman_clock();
$error = $output->{error};
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
output_result($o->{clock}, ("/"));
$last_update_token = $o->{clock};
eval { launch_watchman() };
return 0;
}
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
return 1;
}
sub get_working_dir {
my $working_dir;
if ($^O =~ 'msys' || $^O =~ 'cygwin') {
$working_dir = Win32::GetCwd();
$working_dir =~ tr/\\/\//;
} else {
require Cwd;
$working_dir = Cwd::cwd();
}
return $working_dir;
}

View file

@ -0,0 +1,8 @@
#!/bin/sh
#
# An example hook script to prepare a packed repository for use over
# dumb transports.
#
# To enable this hook, rename this file to "post-update".
exec git update-server-info

View file

@ -0,0 +1,14 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed
# by applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-applypatch".
. git-sh-setup
precommit="$(git rev-parse --git-path hooks/pre-commit)"
test -x "$precommit" && exec "$precommit" ${1+"$@"}
:

View file

@ -0,0 +1,49 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed.
# Called by "git commit" with no arguments. The hook should
# exit with non-zero status after issuing an appropriate message if
# it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-commit".
if git rev-parse --verify HEAD >/dev/null 2>&1
then
against=HEAD
else
# Initial commit: diff against an empty tree object
against=$(git hash-object -t tree /dev/null)
fi
# If you want to allow non-ASCII filenames set this variable to true.
allownonascii=$(git config --type=bool hooks.allownonascii)
# Redirect output to stderr.
exec 1>&2
# Cross platform projects tend to avoid non-ASCII filenames; prevent
# them from being added to the repository. We exploit the fact that the
# printable range starts at the space character and ends with tilde.
if [ "$allownonascii" != "true" ] &&
# Note that the use of brackets around a tr range is ok here, (it's
# even required, for portability to Solaris 10's /usr/bin/tr), since
# the square bracket bytes happen to fall in the designated range.
test $(git diff --cached --name-only --diff-filter=A -z $against |
LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0
then
cat <<\EOF
Error: Attempt to add a non-ASCII file name.
This can cause problems if you want to work with people on other platforms.
To be portable it is advisable to rename the file.
If you know what you are doing you can disable this check using:
git config hooks.allownonascii true
EOF
exit 1
fi
# If there are whitespace errors, print the offending file names and fail.
exec git diff-index --check --cached $against --

View file

@ -0,0 +1,13 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed.
# Called by "git merge" with no arguments. The hook should
# exit with non-zero status after issuing an appropriate message to
# stderr if it wants to stop the merge commit.
#
# To enable this hook, rename this file to "pre-merge-commit".
. git-sh-setup
test -x "$GIT_DIR/hooks/pre-commit" &&
exec "$GIT_DIR/hooks/pre-commit"
:

View file

@ -0,0 +1,53 @@
#!/bin/sh
# An example hook script to verify what is about to be pushed. Called by "git
# push" after it has checked the remote status, but before anything has been
# pushed. If this script exits with a non-zero status nothing will be pushed.
#
# This hook is called with the following parameters:
#
# $1 -- Name of the remote to which the push is being done
# $2 -- URL to which the push is being done
#
# If pushing without using a named remote those arguments will be equal.
#
# Information about the commits which are being pushed is supplied as lines to
# the standard input in the form:
#
# <local ref> <local oid> <remote ref> <remote oid>
#
# This sample shows how to prevent push of commits where the log message starts
# with "WIP" (work in progress).
remote="$1"
url="$2"
zero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')
while read local_ref local_oid remote_ref remote_oid
do
if test "$local_oid" = "$zero"
then
# Handle delete
:
else
if test "$remote_oid" = "$zero"
then
# New branch, examine all commits
range="$local_oid"
else
# Update to existing branch, examine new commits
range="$remote_oid..$local_oid"
fi
# Check for WIP commit
commit=$(git rev-list -n 1 --grep '^WIP' "$range")
if test -n "$commit"
then
echo >&2 "Found WIP commit in $local_ref, not pushing"
exit 1
fi
fi
done
exit 0

View file

@ -0,0 +1,169 @@
#!/bin/sh
#
# Copyright (c) 2006, 2008 Junio C Hamano
#
# The "pre-rebase" hook is run just before "git rebase" starts doing
# its job, and can prevent the command from running by exiting with
# non-zero status.
#
# The hook is called with the following parameters:
#
# $1 -- the upstream the series was forked from.
# $2 -- the branch being rebased (or empty when rebasing the current branch).
#
# This sample shows how to prevent topic branches that are already
# merged to 'next' branch from getting rebased, because allowing it
# would result in rebasing already published history.
publish=next
basebranch="$1"
if test "$#" = 2
then
topic="refs/heads/$2"
else
topic=`git symbolic-ref HEAD` ||
exit 0 ;# we do not interrupt rebasing detached HEAD
fi
case "$topic" in
refs/heads/??/*)
;;
*)
exit 0 ;# we do not interrupt others.
;;
esac
# Now we are dealing with a topic branch being rebased
# on top of master. Is it OK to rebase it?
# Does the topic really exist?
git show-ref -q "$topic" || {
echo >&2 "No such branch $topic"
exit 1
}
# Is topic fully merged to master?
not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
if test -z "$not_in_master"
then
echo >&2 "$topic is fully merged to master; better remove it."
exit 1 ;# we could allow it, but there is no point.
fi
# Is topic ever merged to next? If so you should not be rebasing it.
only_next_1=`git rev-list ^master "^$topic" ${publish} | sort`
only_next_2=`git rev-list ^master ${publish} | sort`
if test "$only_next_1" = "$only_next_2"
then
not_in_topic=`git rev-list "^$topic" master`
if test -z "$not_in_topic"
then
echo >&2 "$topic is already up to date with master"
exit 1 ;# we could allow it, but there is no point.
else
exit 0
fi
else
not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
/usr/bin/perl -e '
my $topic = $ARGV[0];
my $msg = "* $topic has commits already merged to public branch:\n";
my (%not_in_next) = map {
/^([0-9a-f]+) /;
($1 => 1);
} split(/\n/, $ARGV[1]);
for my $elem (map {
/^([0-9a-f]+) (.*)$/;
[$1 => $2];
} split(/\n/, $ARGV[2])) {
if (!exists $not_in_next{$elem->[0]}) {
if ($msg) {
print STDERR $msg;
undef $msg;
}
print STDERR " $elem->[1]\n";
}
}
' "$topic" "$not_in_next" "$not_in_master"
exit 1
fi
<<\DOC_END
This sample hook safeguards topic branches that have been
published from being rewound.
The workflow assumed here is:
* Once a topic branch forks from "master", "master" is never
merged into it again (either directly or indirectly).
* Once a topic branch is fully cooked and merged into "master",
it is deleted. If you need to build on top of it to correct
earlier mistakes, a new topic branch is created by forking at
the tip of the "master". This is not strictly necessary, but
it makes it easier to keep your history simple.
* Whenever you need to test or publish your changes to topic
branches, merge them into "next" branch.
The script, being an example, hardcodes the publish branch name
to be "next", but it is trivial to make it configurable via
$GIT_DIR/config mechanism.
With this workflow, you would want to know:
(1) ... if a topic branch has ever been merged to "next". Young
topic branches can have stupid mistakes you would rather
clean up before publishing, and things that have not been
merged into other branches can be easily rebased without
affecting other people. But once it is published, you would
not want to rewind it.
(2) ... if a topic branch has been fully merged to "master".
Then you can delete it. More importantly, you should not
build on top of it -- other people may already want to
change things related to the topic as patches against your
"master", so if you need further changes, it is better to
fork the topic (perhaps with the same name) afresh from the
tip of "master".
Let's look at this example:
o---o---o---o---o---o---o---o---o---o "next"
/ / / /
/ a---a---b A / /
/ / / /
/ / c---c---c---c B /
/ / / \ /
/ / / b---b C \ /
/ / / / \ /
---o---o---o---o---o---o---o---o---o---o---o "master"
A, B and C are topic branches.
* A has one fix since it was merged up to "next".
* B has finished. It has been fully merged up to "master" and "next",
and is ready to be deleted.
* C has not merged to "next" at all.
We would want to allow C to be rebased, refuse A, and encourage
B to be deleted.
To compute (1):
git rev-list ^master ^topic next
git rev-list ^master next
if these match, topic has not merged in next at all.
To compute (2):
git rev-list master..topic
if this is empty, it is fully merged to "master".
DOC_END

View file

@ -0,0 +1,24 @@
#!/bin/sh
#
# An example hook script to make use of push options.
# The example simply echoes all push options that start with 'echoback='
# and rejects all pushes when the "reject" push option is used.
#
# To enable this hook, rename this file to "pre-receive".
if test -n "$GIT_PUSH_OPTION_COUNT"
then
i=0
while test "$i" -lt "$GIT_PUSH_OPTION_COUNT"
do
eval "value=\$GIT_PUSH_OPTION_$i"
case "$value" in
echoback=*)
echo "echo from the pre-receive-hook: ${value#*=}" >&2
;;
reject)
exit 1
esac
i=$((i + 1))
done
fi

View file

@ -0,0 +1,42 @@
#!/bin/sh
#
# An example hook script to prepare the commit log message.
# Called by "git commit" with the name of the file that has the
# commit message, followed by the description of the commit
# message's source. The hook's purpose is to edit the commit
# message file. If the hook fails with a non-zero status,
# the commit is aborted.
#
# To enable this hook, rename this file to "prepare-commit-msg".
# This hook includes three examples. The first one removes the
# "# Please enter the commit message..." help message.
#
# The second includes the output of "git diff --name-status -r"
# into the message, just before the "git status" output. It is
# commented because it doesn't cope with --amend or with squashed
# commits.
#
# The third example adds a Signed-off-by line to the message, that can
# still be edited. This is rarely a good idea.
COMMIT_MSG_FILE=$1
COMMIT_SOURCE=$2
SHA1=$3
/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE"
# case "$COMMIT_SOURCE,$SHA1" in
# ,|template,)
# /usr/bin/perl -i.bak -pe '
# print "\n" . `git diff --cached --name-status -r`
# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;;
# *) ;;
# esac
# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE"
# if test -z "$COMMIT_SOURCE"
# then
# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE"
# fi

View file

@ -0,0 +1,78 @@
#!/bin/sh
# An example hook script to update a checked-out tree on a git push.
#
# This hook is invoked by git-receive-pack(1) when it reacts to git
# push and updates reference(s) in its repository, and when the push
# tries to update the branch that is currently checked out and the
# receive.denyCurrentBranch configuration variable is set to
# updateInstead.
#
# By default, such a push is refused if the working tree and the index
# of the remote repository has any difference from the currently
# checked out commit; when both the working tree and the index match
# the current commit, they are updated to match the newly pushed tip
# of the branch. This hook is to be used to override the default
# behaviour; however the code below reimplements the default behaviour
# as a starting point for convenient modification.
#
# The hook receives the commit with which the tip of the current
# branch is going to be updated:
commit=$1
# It can exit with a non-zero status to refuse the push (when it does
# so, it must not modify the index or the working tree).
die () {
echo >&2 "$*"
exit 1
}
# Or it can make any necessary changes to the working tree and to the
# index to bring them to the desired state when the tip of the current
# branch is updated to the new commit, and exit with a zero status.
#
# For example, the hook can simply run git read-tree -u -m HEAD "$1"
# in order to emulate git fetch that is run in the reverse direction
# with git push, as the two-tree form of git read-tree -u -m is
# essentially the same as git switch or git checkout that switches
# branches while keeping the local changes in the working tree that do
# not interfere with the difference between the branches.
# The below is a more-or-less exact translation to shell of the C code
# for the default behaviour for git's push-to-checkout hook defined in
# the push_to_deploy() function in builtin/receive-pack.c.
#
# Note that the hook will be executed from the repository directory,
# not from the working tree, so if you want to perform operations on
# the working tree, you will have to adapt your code accordingly, e.g.
# by adding "cd .." or using relative paths.
if ! git update-index -q --ignore-submodules --refresh
then
die "Up-to-date check failed"
fi
if ! git diff-files --quiet --ignore-submodules --
then
die "Working directory has unstaged changes"
fi
# This is a rough translation of:
#
# head_has_history() ? "HEAD" : EMPTY_TREE_SHA1_HEX
if git cat-file -e HEAD 2>/dev/null
then
head=HEAD
else
head=$(git hash-object -t tree --stdin </dev/null)
fi
if ! git diff-index --quiet --cached --ignore-submodules $head --
then
die "Working directory has staged changes"
fi
if ! git read-tree -u -m "$commit"
then
die "Could not update working tree to new HEAD"
fi

View file

@ -0,0 +1,128 @@
#!/bin/sh
#
# An example hook script to block unannotated tags from entering.
# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
#
# To enable this hook, rename this file to "update".
#
# Config
# ------
# hooks.allowunannotated
# This boolean sets whether unannotated tags will be allowed into the
# repository. By default they won't be.
# hooks.allowdeletetag
# This boolean sets whether deleting tags will be allowed in the
# repository. By default they won't be.
# hooks.allowmodifytag
# This boolean sets whether a tag may be modified after creation. By default
# it won't be.
# hooks.allowdeletebranch
# This boolean sets whether deleting branches will be allowed in the
# repository. By default they won't be.
# hooks.denycreatebranch
# This boolean sets whether remotely creating branches will be denied
# in the repository. By default this is allowed.
#
# --- Command line
refname="$1"
oldrev="$2"
newrev="$3"
# --- Safety check
if [ -z "$GIT_DIR" ]; then
echo "Don't run this script from the command line." >&2
echo " (if you want, you could supply GIT_DIR then run" >&2
echo " $0 <ref> <oldrev> <newrev>)" >&2
exit 1
fi
if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
echo "usage: $0 <ref> <oldrev> <newrev>" >&2
exit 1
fi
# --- Config
allowunannotated=$(git config --type=bool hooks.allowunannotated)
allowdeletebranch=$(git config --type=bool hooks.allowdeletebranch)
denycreatebranch=$(git config --type=bool hooks.denycreatebranch)
allowdeletetag=$(git config --type=bool hooks.allowdeletetag)
allowmodifytag=$(git config --type=bool hooks.allowmodifytag)
# check for no description
projectdesc=$(sed -e '1q' "$GIT_DIR/description")
case "$projectdesc" in
"Unnamed repository"* | "")
echo "*** Project description file hasn't been set" >&2
exit 1
;;
esac
# --- Check types
# if $newrev is 0000...0000, it's a commit to delete a ref.
zero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')
if [ "$newrev" = "$zero" ]; then
newrev_type=delete
else
newrev_type=$(git cat-file -t $newrev)
fi
case "$refname","$newrev_type" in
refs/tags/*,commit)
# un-annotated tag
short_refname=${refname##refs/tags/}
if [ "$allowunannotated" != "true" ]; then
echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
exit 1
fi
;;
refs/tags/*,delete)
# delete tag
if [ "$allowdeletetag" != "true" ]; then
echo "*** Deleting a tag is not allowed in this repository" >&2
exit 1
fi
;;
refs/tags/*,tag)
# annotated tag
if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
then
echo "*** Tag '$refname' already exists." >&2
echo "*** Modifying a tag is not allowed in this repository." >&2
exit 1
fi
;;
refs/heads/*,commit)
# branch
if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
echo "*** Creating a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/heads/*,delete)
# delete branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/remotes/*,commit)
# tracking branch
;;
refs/remotes/*,delete)
# delete tracking branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a tracking branch is not allowed in this repository" >&2
exit 1
fi
;;
*)
# Anything else (is there anything else?)
echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
exit 1
;;
esac
# --- Finished
exit 0

View file

@ -0,0 +1,6 @@
# git ls-files --others --exclude-from=.git/info/exclude
# Lines that start with '#' are comments.
# For a project mostly in C, the following would be a good set of
# exclude patterns (uncomment them if you want to use them):
# *.[oa]
# *~

View file

@ -0,0 +1,2 @@
# pack-refs with: peeled fully-peeled sorted
925eb0f6b19282b5f10dfe008e0062b4be6dd41a refs/heads/master

View file

@ -0,0 +1 @@
925eb0f6b19282b5f10dfe008e0062b4be6dd41a

View file

@ -76,7 +76,6 @@ final class GitCredentialTest: XCTestCase {
}
func testSSHKeyCredentialProvider() throws {
throw XCTSkip("Skipped. This test failed in CI environment. Reason still unknown.")
let credentialProvider = getCredentialProvider(authenticationMethod: .key)
XCTAssertNotNil(credentialProvider.credential(for: .sshCustom, url: nil, userName: nil))

View file

@ -13,47 +13,320 @@ import XCTest
@testable import passKit
final class PasswordStoreTest: XCTestCase {
private let remoteRepoURL = URL(string: "https://github.com/mssun/passforios-password-store.git")!
private let localRepoURL: URL = Globals.sharedContainerURL.appendingPathComponent("Library/password-store-test/")
private var passwordStore: PasswordStore! = nil
override func setUp() {
    // Fresh store instance per test; the on-disk repo is created by cloneRepository().
    passwordStore = PasswordStore(url: localRepoURL)
}
override func tearDown() {
    // erase() wipes repo files, Core Data entities, keychain, and passcode state
    // so each test starts from a clean slate; Defaults are cleared separately.
    passwordStore.erase()
    passwordStore = nil
    Defaults.removeAll()
}
/// After cloning the fixture repository, the Core Data store must mirror it:
/// 4 passwords, 16 synced (remote) commits, entities linked to parent directories.
/// Uses `XCTUnwrap` instead of force-unwrap so a missing entity fails this test
/// rather than crashing the whole run.
func testInitPasswordEntityCoreData() throws {
    try cloneRepository(.withGPGID)

    XCTAssertEqual(passwordStore.numberOfPasswords, 4)
    XCTAssertEqual(passwordStore.numberOfCommits, 16)
    XCTAssertEqual(passwordStore.numberOfLocalCommits, 0)

    // A freshly cloned password entity is synced and attached to its directory.
    let entity = try XCTUnwrap(passwordStore.fetchPasswordEntity(with: "personal/github.com.gpg"))
    XCTAssertEqual(entity.path, "personal/github.com.gpg")
    XCTAssertEqual(entity.name, "github.com")
    XCTAssertTrue(entity.isSynced)
    XCTAssertEqual(try XCTUnwrap(entity.parent).name, "personal")

    XCTAssertNotNil(passwordStore.fetchPasswordEntity(with: "family/amazon.com.gpg"))
    XCTAssertNotNil(passwordStore.fetchPasswordEntity(with: "work/github.com.gpg"))
    XCTAssertNotNil(passwordStore.fetchPasswordEntity(with: "shared/github.com.gpg"))

    // Directories are stored as entities too, with their children relationship set.
    let dirEntity = try XCTUnwrap(passwordStore.fetchPasswordEntity(with: "shared"))
    XCTAssertTrue(dirEntity.isDir)
    XCTAssertEqual(dirEntity.name, "shared")
    XCTAssertEqual(dirEntity.children.count, 1)
}
// eraseStoreData() must remove the on-disk repository, empty the entity store,
// drop the git handle, and post both store notifications.
func testEraseStoreData() throws {
    try cloneRepository(.withGPGID)
    // Precondition: the clone produced files, entities, and a git repository.
    XCTAssertTrue(FileManager.default.fileExists(atPath: localRepoURL.path))
    XCTAssertGreaterThan(passwordStore.numberOfPasswords, 0)
    XCTAssertNotNil(passwordStore.gitRepository)
    // Register notification expectations before triggering the erase.
    expectation(forNotification: .passwordStoreUpdated, object: nil)
    expectation(forNotification: .passwordStoreErased, object: nil)
    passwordStore.eraseStoreData()
    // All local store state must be gone afterwards.
    XCTAssertFalse(FileManager.default.fileExists(atPath: localRepoURL.path))
    XCTAssertEqual(passwordStore.numberOfPasswords, 0)
    XCTAssertNil(passwordStore.gitRepository)
    waitForExpectations(timeout: 1, handler: nil)
}
// erase() is the full wipe: store data plus keychain keys, user defaults,
// passcode, and PGP agent state.
func testErase() throws {
    try cloneRepository(.withGPGID)
    try importSinglePGPKey()
    Defaults.gitSignatureName = "Test User"
    PasscodeLock.shared.save(passcode: "1234")
    // Precondition: every subsystem holds some state to be erased.
    XCTAssertGreaterThan(passwordStore.numberOfPasswords, 0)
    XCTAssertTrue(AppKeychain.shared.contains(key: PGPKey.PUBLIC.getKeychainKey()))
    XCTAssertEqual(Defaults.gitSignatureName, "Test User")
    XCTAssertTrue(PasscodeLock.shared.hasPasscode)
    XCTAssertTrue(PGPAgent.shared.isInitialized())
    // Register notification expectations before triggering the erase.
    expectation(forNotification: .passwordStoreUpdated, object: nil)
    expectation(forNotification: .passwordStoreErased, object: nil)
    passwordStore.erase()
    // Every piece of app state must be cleared, not just the repository.
    XCTAssertEqual(passwordStore.numberOfPasswords, 0)
    XCTAssertFalse(AppKeychain.shared.contains(key: PGPKey.PUBLIC.getKeychainKey()))
    XCTAssertFalse(Defaults.hasKey(\.gitSignatureName))
    XCTAssertFalse(PasscodeLock.shared.hasPasscode)
    XCTAssertFalse(PGPAgent.shared.isInitialized())
    waitForExpectations(timeout: 1, handler: nil)
}
func testFetchPasswordEntityCoreDataByParent() throws {
    try cloneRepository(.withGPGID)
    // parent == nil returns the top-level entities; in this fixture they are all directories.
    let rootChildren = passwordStore.fetchPasswordEntityCoreData(parent: nil)
    XCTAssertGreaterThan(rootChildren.count, 0)
    rootChildren.forEach { entity in
        XCTAssertTrue(entity.isDir)
    }
    // Passing a directory entity returns only its direct children.
    let personalDir = passwordStore.fetchPasswordEntity(with: "personal")
    let personalChildren = passwordStore.fetchPasswordEntityCoreData(parent: personalDir)
    XCTAssertEqual(personalChildren.count, 1)
    XCTAssertEqual(personalChildren.first?.name, "github.com")
}
func testFetchPasswordEntityCoreDataWithDir() throws {
    try cloneRepository(.withGPGID)
    // withDir: false filters out directory entities, leaving the 4 fixture passwords.
    let allPasswords = passwordStore.fetchPasswordEntityCoreData(withDir: false)
    XCTAssertEqual(allPasswords.count, 4)
    allPasswords.forEach { entity in
        XCTAssertFalse(entity.isDir)
    }
}
/// Adding passwords creates the encrypted files (and intermediate folders) on
/// disk, posts an update notification per addition, and records one commit each.
///
/// The original used `forEach` + `try?` + force-unwrap: a failing `add(password:)`
/// had its error silently discarded and the test then crashed on `savedEntity!`.
/// A `for`-in loop lets the error propagate and fail the test normally.
func testAddPassword() throws {
    try cloneRepository(.empty)
    try importSinglePGPKey()
    let numCommitsBefore = try XCTUnwrap(passwordStore.numberOfCommits)
    let numLocalCommitsBefore = passwordStore.numberOfLocalCommits

    let passwords = [
        Password(name: "test1", path: "test1.gpg", plainText: "foobar"),
        Password(name: "test2", path: "test2.gpg", plainText: "hello world"),
        Password(name: "test3", path: "folder/test3.gpg", plainText: "lorem ipsum"),
        Password(name: "test4", path: "test4.gpg", plainText: "you are valuable and you matter"),
    ]
    for password in passwords {
        // Each add must post exactly one update notification.
        expectation(forNotification: .passwordStoreUpdated, object: nil)
        let savedEntity = try XCTUnwrap(passwordStore.add(password: password))
        XCTAssertEqual(savedEntity.name, password.name)
        waitForExpectations(timeout: 1, handler: nil)
    }

    // Files exist on disk, including the implicitly created "folder" directory.
    for relativePath in ["test1.gpg", "test2.gpg", "folder", "folder/test3.gpg", "test4.gpg"] {
        XCTAssertTrue(
            FileManager.default.fileExists(atPath: localRepoURL.appendingPathComponent(relativePath).path),
            "Missing \(relativePath)"
        )
    }
    // One commit per added password, all local (unpushed).
    XCTAssertEqual(passwordStore.numberOfCommits, numCommitsBefore + 4)
    XCTAssertEqual(passwordStore.numberOfLocalCommits, numLocalCommitsBefore + 4)
}
/// Deleting the last password in a directory removes the password entity, its
/// now-empty parent directory (both the entity and the folder on disk), and
/// records the change as exactly one local commit.
func testDeletePassword() throws {
    try cloneRepository(.withGPGID)
    let numCommitsBefore = try XCTUnwrap(passwordStore.numberOfCommits)
    let numLocalCommitsBefore = passwordStore.numberOfLocalCommits

    // Register the notification expectation before triggering the delete.
    expectation(forNotification: .passwordStoreUpdated, object: nil)
    let entity = try XCTUnwrap(passwordStore.fetchPasswordEntity(with: "personal/github.com.gpg"))
    try passwordStore.delete(passwordEntity: entity)

    XCTAssertNil(passwordStore.fetchPasswordEntity(with: "personal/github.com.gpg"))
    // "personal" contained only this password, so the directory goes away too.
    XCTAssertNil(passwordStore.fetchPasswordEntity(with: "personal"))
    XCTAssertFalse(FileManager.default.fileExists(atPath: localRepoURL.appendingPathComponent("personal").path))
    XCTAssertEqual(passwordStore.numberOfCommits, numCommitsBefore + 1)
    XCTAssertEqual(passwordStore.numberOfLocalCommits, numLocalCommitsBefore + 1)
    waitForExpectations(timeout: 1, handler: nil)
}
/// Deleting a directory entity directly must fail with `.cannotDeleteDirectory`
/// and leave files, entities, and history untouched. Regression test: this
/// operation used to corrupt local state (entities stayed in the DB while
/// files/dirs were removed from git and disk).
func testDeleteDirectoryFails() throws {
    try cloneRepository(.withGPGID)
    let numCommitsBefore = try XCTUnwrap(passwordStore.numberOfCommits)
    let numLocalCommitsBefore = passwordStore.numberOfLocalCommits

    // Inverted: a failed delete must NOT post an update notification.
    expectation(forNotification: .passwordStoreUpdated, object: nil).isInverted = true
    let entity = try XCTUnwrap(passwordStore.fetchPasswordEntity(with: "personal"))
    XCTAssertThrowsError(try passwordStore.delete(passwordEntity: entity)) { error in
        XCTAssertTrue(error is AppError, "Unexpected error type: \(type(of: error))")
        XCTAssertEqual(error as? AppError, .cannotDeleteDirectory)
    }

    // Nothing changed: entity, file, and commit counts are intact.
    XCTAssertNotNil(passwordStore.fetchPasswordEntity(with: "personal/github.com.gpg"))
    XCTAssertTrue(FileManager.default.fileExists(atPath: localRepoURL.appendingPathComponent("personal/github.com.gpg").path))
    XCTAssertEqual(passwordStore.numberOfCommits, numCommitsBefore)
    XCTAssertEqual(passwordStore.numberOfLocalCommits, numLocalCommitsBefore)
    waitForExpectations(timeout: 0.1, handler: nil)
}
/// Editing only the content (`PasswordChange.content`) keeps name and path,
/// re-encrypts the value, marks the entity unsynced, and adds one local commit.
/// Force-unwraps replaced with `XCTUnwrap` so failures fail the test cleanly.
func testEditPasswordValue() throws {
    try cloneRepository(.withGPGID)
    try importSinglePGPKey()
    let numCommitsBefore = try XCTUnwrap(passwordStore.numberOfCommits)
    let numLocalCommitsBefore = passwordStore.numberOfLocalCommits
    let entity = try XCTUnwrap(passwordStore.fetchPasswordEntity(with: "personal/github.com.gpg"))

    expectation(forNotification: .passwordStoreUpdated, object: nil)
    let editedPassword = Password(name: entity.name, path: entity.path, plainText: "editedpassword")
    editedPassword.changed = PasswordChange.content.rawValue
    let editedEntity = try XCTUnwrap(passwordStore.edit(passwordEntity: entity, password: editedPassword))

    XCTAssertEqual(editedEntity.name, "github.com")
    // A local edit is no longer in sync with the remote.
    XCTAssertFalse(editedEntity.isSynced)
    // Round-trip: decrypting the file yields the new plaintext.
    XCTAssertEqual(try decrypt(path: "personal/github.com.gpg").plainText, "editedpassword")
    XCTAssertEqual(passwordStore.numberOfCommits, numCommitsBefore + 1)
    XCTAssertEqual(passwordStore.numberOfLocalCommits, numLocalCommitsBefore + 1)
    waitForExpectations(timeout: 1, handler: nil)
}
/// Moving/renaming a password (`PasswordChange.path`) removes the entity at the
/// old path, creates one at the new path with identical content, and records
/// one local commit.
func testMovePassword() throws {
    try cloneRepository(.withGPGID)
    try importSinglePGPKey()
    let numCommitsBefore = try XCTUnwrap(passwordStore.numberOfCommits)
    let numLocalCommitsBefore = passwordStore.numberOfLocalCommits
    let entity = try XCTUnwrap(passwordStore.fetchPasswordEntity(with: "personal/github.com.gpg"))

    expectation(forNotification: .passwordStoreUpdated, object: nil)
    let editedPassword = Password(name: "new name", path: "new name.gpg", plainText: "passwordforpersonal\n")
    editedPassword.changed = PasswordChange.path.rawValue
    let editedEntity = try XCTUnwrap(passwordStore.edit(passwordEntity: entity, password: editedPassword))

    XCTAssertEqual(editedEntity.name, "new name")
    XCTAssertFalse(editedEntity.isSynced)
    // Content survives the move; the old entity is gone.
    XCTAssertEqual(try decrypt(path: "new name.gpg").plainText, "passwordforpersonal\n")
    XCTAssertNil(passwordStore.fetchPasswordEntity(with: "personal/github.com.gpg"))
    XCTAssertEqual(passwordStore.numberOfCommits, numCommitsBefore + 1)
    XCTAssertEqual(passwordStore.numberOfLocalCommits, numLocalCommitsBefore + 1)
    waitForExpectations(timeout: 1, handler: nil)
}
/// reset() drops all local (unpushed) commits and restores files and entities
/// to the last synced state.
///
/// Fix: the setup `add` used `_ = try?`, which silently swallowed a failure and
/// let the test proceed against the wrong state; setup now throws on failure.
func testReset() throws {
    try cloneRepository(.withGPGID)
    try importSinglePGPKey()
    let numCommitsBefore = try XCTUnwrap(passwordStore.numberOfCommits)
    let numLocalCommitsBefore = passwordStore.numberOfLocalCommits

    // Create two local commits: one addition and one deletion.
    _ = try passwordStore.add(password: Password(name: "test", path: "test.gpg", plainText: "foobar"))
    try passwordStore.delete(passwordEntity: XCTUnwrap(passwordStore.fetchPasswordEntity(with: "personal/github.com.gpg")))

    expectation(forNotification: .passwordStoreUpdated, object: nil)
    let numDroppedCommits = try passwordStore.reset()
    XCTAssertEqual(numDroppedCommits, 2)

    // The added file is rolled back and the deleted file is restored.
    XCTAssertFalse(FileManager.default.fileExists(atPath: localRepoURL.appendingPathComponent("test.gpg").path))
    XCTAssertTrue(FileManager.default.fileExists(atPath: localRepoURL.appendingPathComponent("personal/github.com.gpg").path))
    XCTAssertEqual(passwordStore.numberOfCommits, numCommitsBefore)
    XCTAssertEqual(passwordStore.numberOfLocalCommits, numLocalCommitsBefore)
    waitForExpectations(timeout: 1, handler: nil)
}
// MARK: - .gpg-id support
/// Clones a store whose directories carry different `.gpg-id` key IDs and
/// verifies that each entry decrypts with its own key, plus an
/// encrypt-save-decrypt round trip through a second store instance.
///
/// NOTE(review): this block contained stale diff-residue duplicates (a second
/// `keyID`/`personal`/`work` declaration and an inline key import repeating
/// `importMultiplePGPKeys()`); the newer lines are kept, the stale ones removed.
func testCloneAndDecryptMultiKeys() throws {
    let url = Globals.sharedContainerURL.appendingPathComponent("Library/password-store-test/")
    try cloneRepository(.withGPGID)
    try importMultiplePGPKeys()
    Defaults.isEnableGPGIDOn = true

    // Clone a second, independent store instance into its own directory.
    let passwordStore = PasswordStore(url: url)
    try passwordStore.cloneRepository(remoteRepoURL: remoteRepoURL, branchName: "master")
    expectation(for: NSPredicate { _, _ in FileManager.default.fileExists(atPath: url.path) }, evaluatedWith: nil)
    waitForExpectations(timeout: 3, handler: nil)

    // Each directory's .gpg-id must resolve to its own key ID.
    [
        ("work/github.com", "4712286271220DB299883EA7062E678DA1024DAE"),
        ("personal/github.com", "787EAE1A5FA3E749AA34CC6AA0645EBED862027E"),
    ].forEach { path, id in
        let keyID = findGPGID(from: localRepoURL.appendingPathComponent(path))
        XCTAssertEqual(keyID, id)
    }

    // Keys were already imported via importMultiplePGPKeys(); decrypt both
    // entries, each encrypted to a different key.
    let personal = try decrypt(path: "personal/github.com.gpg")
    XCTAssertEqual(personal.plainText, "passwordforpersonal\n")
    let work = try decrypt(path: "work/github.com.gpg")
    XCTAssertEqual(work.plainText, "passwordforwork\n")

    // Round-trip a new password through the secondary store.
    let testPassword = Password(name: "test", path: "test.gpg", plainText: "testpassword")
    let testPasswordEntity = try XCTUnwrap(passwordStore.add(password: testPassword))
    let testPasswordPlain = try passwordStore.decrypt(passwordEntity: testPasswordEntity, requestPGPKeyPassphrase: requestPGPKeyPassphrase)
    XCTAssertEqual(testPasswordPlain.plainText, "testpassword")

    passwordStore.erase()
    Defaults.isEnableGPGIDOn = false
}
private func decrypt(passwordStore: PasswordStore, path: String, passphrase _: String) throws -> Password {
// MARK: - Helpers
/// Bare fixture repositories bundled with the test target.
private enum RemoteRepo {
    case empty
    case withGPGID

    /// Location of the bare repo inside the test bundle's Fixtures folder.
    var url: URL {
        let fixtures = Bundle(for: PasswordStoreTest.self).resourceURL!
        switch self {
        case .empty:
            return fixtures.appendingPathComponent("Fixtures/password-store-empty.git")
        case .withGPGID:
            return fixtures.appendingPathComponent("Fixtures/password-store-with-gpgid.git")
        }
    }

    /// Default branch recorded in the fixture repo.
    var branchName: String {
        switch self {
        case .empty:
            return "main"
        case .withGPGID:
            return "master"
        }
    }
}
/// Clones the given fixture repo into the shared `passwordStore` and blocks
/// until both the checkout exists on disk and the store has posted its
/// `.passwordStoreUpdated` notification.
private func cloneRepository(_ remote: RemoteRepo) throws {
    let checkoutExists = NSPredicate { _, _ in
        FileManager.default.fileExists(atPath: self.localRepoURL.path)
    }
    expectation(for: checkoutExists, evaluatedWith: nil)
    expectation(forNotification: .passwordStoreUpdated, object: nil)
    try passwordStore.cloneRepository(remoteRepoURL: remote.url, branchName: remote.branchName)
    waitForExpectations(timeout: 3, handler: nil)
}
/// Loads the RSA4096 test key pair into the app keychain and
/// (re)initializes the shared PGP agent with it.
private func importSinglePGPKey() throws {
    let keyStore = AppKeychain.shared
    let publicImporter = KeyFileManager(keyType: PGPKey.PUBLIC, keyPath: "", keyHandler: keyStore.add)
    try publicImporter.importKey(from: RSA4096.publicKey)
    let privateImporter = KeyFileManager(keyType: PGPKey.PRIVATE, keyPath: "", keyHandler: keyStore.add)
    try privateImporter.importKey(from: RSA4096.privateKey)
    try PGPAgent.shared.initKeys()
}
/// Loads both test key pairs (RSA2048 + RSA4096) into the app keychain and
/// (re)initializes the shared PGP agent with them.
private func importMultiplePGPKeys() throws {
    let keyStore = AppKeychain.shared
    let publicImporter = KeyFileManager(keyType: PGPKey.PUBLIC, keyPath: "", keyHandler: keyStore.add)
    try publicImporter.importKey(from: RSA2048_RSA4096.publicKeys)
    let privateImporter = KeyFileManager(keyType: PGPKey.PRIVATE, keyPath: "", keyHandler: keyStore.add)
    try privateImporter.importKey(from: RSA2048_RSA4096.privateKeys)
    try PGPAgent.shared.initKeys()
}
/// Fetches the entity at `path` from the shared store and decrypts it,
/// optionally pinning decryption to a specific `keyID`.
///
/// Fix: the block contained two `return` statements (stale diff residue);
/// the first ignored the `keyID` parameter and made the second unreachable.
/// The keyID-aware call is kept. `XCTUnwrap` replaces the force-unwrap so a
/// missing entity fails with a clear test error.
private func decrypt(path: String, keyID: String? = nil) throws -> Password {
    let entity = try XCTUnwrap(passwordStore.fetchPasswordEntity(with: path))
    return try passwordStore.decrypt(passwordEntity: entity, keyID: keyID, requestPGPKeyPassphrase: requestPGPKeyPassphrase)
}
}

View file

@ -0,0 +1,115 @@
# Improve Test Coverage Plan
## Motivation
The passKit codebase has ~100 test methods but critical components that will be heavily refactored (for multi-store support and other changes) have little or no test coverage. Adding regression tests now prevents silent breakage during future work.
This is standalone — it should be done before any other refactoring.
---
## Current Test Coverage
### Well-tested areas
- Password parsing (`Password`, `Parser`, `AdditionField`, OTP, `TokenBuilder`) — ~40 tests
- `PGPAgent` — 8 tests covering multiple key types, error cases, passphrase handling
- `PasswordGenerator` — 8 tests
- `GitRepository` — 8 tests (uses real temp git repos on disk)
- `GitCredential` — 6 tests (SSH test is skipped/"failed in CI")
- `PasswordEntity` Core Data operations — 6 tests (uses in-memory store via `CoreDataTestCase`)
- `KeyFileManager` — 7 tests
- `QRKeyScanner` — 6 tests
- String/Array extensions — 6 tests
### Critical gaps (zero tests)
| Component | Notes |
|-----------|-------|
| **`PasswordStore`** (36 methods) | Only 1 integration test that clones from GitHub. No unit tests for pull, push, add, delete, edit, decrypt, encrypt, reset, erase, eraseStoreData, deleteCoreData, fetchPasswordEntityCoreData, initPasswordEntityCoreData. |
| **`AppKeychain`** | Zero tests. Only exercised indirectly via `DictBasedKeychain` mock. |
| **`PersistenceController` / Core Data stack** | Only the `isUnitTest: true` path is exercised. No tests for `reinitializePersistentStore`, `deletePersistentStore`, error recovery. |
| **Services** (`PasswordDecryptor`, `PasswordEncryptor`, `PasswordManager`, `PasswordNavigationDataSource`) | Zero tests. Core business logic that ties `PasswordStore` + `PGPAgent` together. |
| **All view controllers (28+)** | Zero tests. No UI test target exists. |
| **AutoFill / Share / Shortcuts extensions** | Zero tests. No test targets for extensions. |
| **`PasscodeLock`** | Zero tests. Security-critical. |
### Test infrastructure that already exists
- `CoreDataTestCase` — base class with in-memory `PersistenceController` (reusable)
- `DictBasedKeychain` — in-memory `KeyStore` mock (reusable)
- `TestPGPKeys` — PGP key fixtures for RSA2048, RSA4096, ED25519, NISTP384, multi-key sets
---
## Implementation
### 1. Fixture password-store repo
A pre-built bare git repo checked into `passKitTests/Fixtures/password-store.git/`. Contains:
- A `.gpg-id` file with test key ID(s)
- Several `.gpg` files encrypted with the test keys from `TestPGPKeys` (at various directory depths)
- A subdirectory structure to exercise the BFS walk (nested folders, empty dirs)
- A git history with at least a couple of commits
Since it's a bare repo, its contents (`HEAD`, `objects/`, `refs/`, etc.) are just regular files from the outer repo's perspective — no submodule issues.
**Xcode project setup**: The fixture directory must be added to the Xcode project as a **folder reference** (blue folder) in the passKitTests target, and with "Build Rules" set to "Apply Once to Folder", so it's included in the "Copy Bundle Resources" build phase. In Xcode: drag the `Fixtures/` directory into the passKitTests group → select "Create folder references" → check only the passKitTests target. Without this, the files won't be accessible from the test bundle at runtime.
To update the fixture, pull from origin (already set to `https://github.com/mssun/passforios-password-store.git`) or replace with any local bare repo:
```sh
# Update from origin
cd passKitTests/Fixtures/password-store.git
git fetch origin
git update-ref refs/heads/master origin/master
# Or replace with a custom local repo
git clone --bare /path/to/local/repo passKitTests/Fixtures/password-store.git
```
### 2. `PasswordStore` unit tests (highest priority)
- **Test `initPasswordEntityCoreData`**: Clone the fixture repo → verify correct `PasswordEntity` tree in Core Data (names, paths, directories, parent-child relationships).
- **Test `deleteCoreData`**: Populate, then delete, verify empty.
- **Test `eraseStoreData`**: Verify repo directory deleted, Core Data cleared, git handle nil'd.
- **Test `erase`**: Verify full cleanup (keychain, defaults, passcode, PGP state).
- **Test `fetchPasswordEntityCoreData`**: Verify fetch with parent filter, withDir filter.
- **Test encrypt → save → decrypt round-trip**: Using `DictBasedKeychain` + test PGP keys + local repo.
- **Test `add` / `delete` / `edit`**: Verify filesystem + Core Data + git commit.
- **Test `reset`**: Verify Core Data rebuilt to match filesystem after git reset.
### 3. `PasswordEntity` relationship tests
Extend `PasswordEntityTest` (already uses `CoreDataTestCase`):
- **Test `initPasswordEntityCoreData` BFS walk**: Create a temp directory tree with `.gpg` files, call the static method, verify entity tree matches filesystem.
- **Test that `.gpg` extension is stripped** from names but non-`.gpg` files keep their names.
- **Test hidden files are skipped**.
- **Test empty directories**.
### 4. `AppKeychain` tests
Basic tests against the real Keychain API (or a test wrapper):
- **Test `add` / `get` / `removeContent`** round-trip.
- **Test `removeAllContent`**.
- **Test `contains`**.
- **Test `removeAllContent(withPrefix:)`** — this method already exists and will be useful for per-store cleanup.
### 5. `PersistenceController` tests
- **Test `reinitializePersistentStore`** — verify existing data is gone after reinit.
- **Test model loading** — verify the `.momd` loads correctly.
---
## Implementation Order
| Step | Description |
|------|-------------|
| 1 | Fixture password-store bare repo |
| 2 | `PasswordStore` unit tests (uses fixture from step 1) |
| 3 | `PasswordEntity` BFS walk + relationship tests |
| 4 | `AppKeychain` tests |
| 5 | `PersistenceController` tests |
Steps 2–5 can be done in parallel once step 1 is complete. Steps 3–5 are also independent of step 1.

View file

@ -0,0 +1,192 @@
# Multi-Recipient Encryption Plan
## Concept
The `pass` password store format supports encrypting each password to multiple PGP keys via `.gpg-id` files (one key ID per line). This enables sharing a store with other users — each person imports the same git repository but decrypts with their own private key. When adding or editing a password, it must be encrypted to **all** key IDs listed in `.gpg-id`.
The app currently has a setting (`isEnableGPGIDOn`) that reads `.gpg-id` for per-directory key selection, but it only supports a single key ID. This plan fixes every layer to support multiple recipients.
This is standalone — it can be implemented before or after multi-store support.
---
## Current State
The codebase does **not** support encrypting to multiple public keys. Every layer assumes a single recipient:
| Layer | Current state | What needs to change |
|-------|--------------|---------------------|
| `.gpg-id` file format | Supports multiple key IDs (one per line) | No change needed |
| `findGPGID(from:)` | Returns the **entire file as one trimmed string** — does not split by newline | Split by newline, return `[String]` |
| `PGPInterface.encrypt()` | Signature: `encrypt(plainData:keyID:)` — singular `keyID: String?` | Add `encrypt(plainData:keyIDs:[String])` or change `keyID` to `keyIDs: [String]?` |
| `GopenPGPInterface` | Creates a `CryptoKeyRing` with **one** public key | Add all recipient public keys to the keyring before encrypting |
| `ObjectivePGPInterface` | Passes `keyring.keys` (all keys, including private) — accidentally multi-recipient but not intentionally | Filter to only the specified public keys, pass those to `ObjectivePGP.encrypt()` |
| `PGPAgent.encrypt()` | Routes to a single key via `keyID: String` | Accept `[String]` and pass through to the interface |
| `PasswordStore.encrypt()` | Calls `findGPGID()` for a single key ID string | Call the updated `findGPGID()`, pass the key ID array |
---
## Implementation
### 1. `findGPGID(from:) -> [String]`
Split file contents by newline, trim each line, filter empty lines. Return array of key IDs. Callers that only need a single key (e.g. for decryption routing) can use `.first`.
### 2. `PGPInterface` protocol
Change `encrypt(plainData:keyID:)` to `encrypt(plainData:keyIDs:)` where `keyIDs: [String]?`. When `nil`, encrypt to the first/default key (backward compatible).
### 3. `GopenPGPInterface.encrypt()`
Look up all keys matching the `keyIDs` array from `publicKeys`. Add each to the `CryptoKeyRing` (GopenPGP's `CryptoKeyRing` supports multiple keys via `add()`). Encrypt with the multi-key ring.
### 4. `ObjectivePGPInterface.encrypt()`
Filter `keyring.keys` to only the public keys matching the requested `keyIDs`. Pass the filtered array to `ObjectivePGP.encrypt()`.
### 5. `PGPAgent.encrypt()`
Update both overloads to accept `keyIDs: [String]?` and pass through to the interface.
### 6. `PasswordStore.encrypt()`
Call updated `findGPGID()`, pass the array to `PGPAgent`.
---
## Public Key Management
When a store lists multiple key IDs in `.gpg-id`, the user needs the public keys of all recipients. The user's own private key is sufficient for decryption (since the message is encrypted to all recipients), but all public keys are needed for re-encryption when editing.
### Current state
- The keychain holds exactly **one** `pgpPublicKey` blob and **one** `pgpPrivateKey` blob.
- The import UI (armor paste, URL, file picker) has one public key field + one private key field. Importing **replaces** the previous key pair entirely.
- Both `GopenPGPInterface` and `ObjectivePGPInterface` *can* parse multiple keys from a single armored blob (e.g. concatenated armor blocks). So if the user pastes multiple public keys into the single field, they would be parsed — but the encrypt path only uses one key, and the UI doesn't communicate this.
- There is no UI for viewing which key IDs are loaded.
### Key storage approach
Store all public keys as a single concatenated armored blob in the keychain (`pgpPublicKey`). Both interface implementations already parse multi-key blobs into dictionaries/keyrings. This avoids schema changes — we just need to **append** instead of **replace** when importing additional public keys.
The user's own private key stays as a separate single blob (`pgpPrivateKey`).
### 7. UI: Import additional recipient public keys
Add an "Import Recipient Key" action to the PGP key settings (alongside the existing import that sets the user's own key pair). This flow:
- Imports a public-key-only armored blob
- **Appends** it to the existing `pgpPublicKey` keychain entry (concatenating armored blocks)
- Does **not** touch the private key
- On success, shows the newly imported key ID(s)
The existing import flow ("Set PGP Keys") continues to replace the user's own key pair (public + private).
### 8. UI: View loaded key IDs and metadata
PGP keys carry a **User ID** field, typically in the format `"Name <email@example.com>"`. Both GopenPGP (`key.entity.PrimaryIdentity()`) and ObjectivePGP (`key.keyID` + user ID packets) can access this. The app currently doesn't expose it.
Add key metadata to the `PGPInterface` protocol:
```swift
struct PGPKeyInfo {
let fingerprint: String // full fingerprint
let shortKeyID: String // last 8 hex chars
let userID: String? // "Name <email>" from the primary identity
let isPrivate: Bool // has a matching private key
let isExpired: Bool
let isRevoked: Bool
}
var keyInfo: [PGPKeyInfo] { get }
```
Both `GopenPGPInterface` and `ObjectivePGPInterface` should implement this by iterating their loaded keys.
Add a read-only section to the PGP key settings showing all loaded public keys. Each row shows:
- **User ID** (e.g. `"Alice <alice@example.com>"`) as the primary label — this is the human-readable identifier
- **Short key ID** (e.g. `ABCD1234`) as the secondary label
- Badge/icon if it's the user's own key (has matching private key) vs a recipient-only key
- Badge/icon if expired or revoked
- Swipe-to-delete to remove a recipient public key
This also informs the `.gpg-id` editing UI (§9) — when the user adds/removes recipients from `.gpg-id`, they see names and emails, not just opaque hex key IDs.
### 9. UI: View/edit `.gpg-id` files
When `isEnableGPGIDOn` is enabled, add visibility into `.gpg-id`:
- In the password detail view, show which key IDs the password is encrypted to (from the nearest `.gpg-id` file)
- In folder navigation, show an indicator on directories that have their own `.gpg-id`
- Tapping the indicator shows the `.gpg-id` contents (list of key IDs) with an option to edit
- Editing `.gpg-id` triggers re-encryption of all passwords in the directory (see §10)
Note: Viewing `.gpg-id` is low-effort and high-value. Editing is more complex due to re-encryption. These can be split into separate steps.
### 10. Re-encryption when `.gpg-id` changes
When the user edits a `.gpg-id` file (adding/removing a recipient), all `.gpg` files in that directory (and subdirectories without their own `.gpg-id`) must be re-encrypted to the new recipient list. This is equivalent to `pass init -p subfolder KEY1 KEY2`.
Steps:
1. Write the new `.gpg-id` file
2. For each `.gpg` file under the directory:
- Decrypt with the user's private key
- Re-encrypt to the new recipient list
- Overwrite the `.gpg` file
3. Git add all changed files + `.gpg-id`
4. Git commit
This can be expensive for large directories. Show progress and allow cancellation.
---
## Implementation Order
| Step | Description | Depends On |
|------|-------------|------------|
| 1 | `findGPGID` returns `[String]` + update callers | — |
| 2 | `PGPInterface` protocol change (`keyIDs: [String]?`) | — |
| 3 | `GopenPGPInterface` multi-key encryption | Step 2 |
| 4 | `ObjectivePGPInterface` multi-key encryption | Step 2 |
| 5 | `PGPAgent` updated overloads | Steps 2-4 |
| 6 | `PasswordStore.encrypt()` uses `[String]` from `findGPGID` | Steps 1+5 |
| 7 | UI: import additional recipient public keys | Step 5 |
| 8 | UI: view loaded key IDs | Step 5 |
| 9a | UI: view `.gpg-id` in password detail / folder view | Step 1 |
| 9b | UI: edit `.gpg-id` | Step 9a |
| 10 | Re-encryption when `.gpg-id` changes | Steps 6+9b |
| T | Tests (see testing section) | Steps 1-10 |
---
## Testing
### Pre-work: existing encryption tests
The `PGPAgentTest` already covers single-key encrypt/decrypt with multiple key types. These serve as the regression baseline.
### Multi-recipient encryption tests
- **Test `findGPGID` with multi-line `.gpg-id`**: File with two key IDs on separate lines → returns `[String]` with both.
- **Test `findGPGID` with single-line `.gpg-id`**: Backward compatible → returns `[String]` with one element.
- **Test `findGPGID` with empty lines and whitespace**: Trims and filters correctly.
- **Test `GopenPGPInterface.encrypt` with multiple keys**: Encrypt with two public keys → decrypt succeeds with either private key.
- **Test `ObjectivePGPInterface.encrypt` with multiple keys**: Same as above.
- **Test `PGPAgent.encrypt` with `keyIDs` array**: Routes through correctly to the interface.
- **Test round-trip**: Encrypt with key IDs `[A, B]` → user with private key A can decrypt, user with private key B can decrypt.
- **Test encrypt with single keyID still works**: Backward compatibility — `keyIDs: ["X"]` behaves like the old `keyID: "X"`.
- **Test encrypt with unknown keyID in list**: If one of the key IDs is not in the keyring, appropriate error is thrown.
- **Test multi-key public key import**: Import an armored blob containing multiple public keys → all are available for encryption.
### Key management tests
- **Test appending recipient public key**: Import user's key pair → append a second public key → both key IDs are available. Original private key still works for decryption.
- **Test removing a recipient public key**: Remove one public key from the concatenated blob → only the remaining key IDs are available.
- **Test replacing key pair doesn't lose recipient keys**: Import user's key pair → add recipient key → re-import user's key pair → recipient key is still present (or: design decision — should re-import clear everything?).
### `.gpg-id` and re-encryption tests
- **Test re-encryption**: Edit `.gpg-id` to add a recipient → all passwords in directory are re-encrypted → new recipient can decrypt.
- **Test re-encryption removes access**: Edit `.gpg-id` to remove a recipient → re-encrypted passwords cannot be decrypted with the removed key.
- **Test `.gpg-id` directory scoping**: Subdirectory `.gpg-id` overrides parent. Passwords in subdirectory use subdirectory's recipients.
- **Test multi-key public key import**: Import an armored blob containing multiple public keys → all are available for encryption.

View file

@ -0,0 +1,423 @@
# Multi-Store Support — Implementation Plan
## Concept
Each **store** is an independent password repository with its own git remote, credentials, branch, and (optionally) its own PGP key pair. Users can enable/disable individual stores for the password list and separately for AutoFill. Stores can be shared between users who each decrypt with their own key (leveraging the existing `.gpg-id` per-directory mechanism from `pass`).
---
## Phase 1: Improve Test Coverage Before Refactoring
See [01-improve-test-coverage-plan.md](01-improve-test-coverage-plan.md). This is standalone and should be done before any refactoring to catch regressions.
---
## Phase 2: Data Model — `StoreConfiguration`
Create a new persistent model for store definitions. This is the foundation everything else builds on.
### 2.1 Define `StoreConfiguration` as a Core Data entity
→ Testing: [T1 — `StoreConfiguration` entity tests](#t1-storeconfiguration-entity-tests)
Add a `StoreConfiguration` entity to the existing Core Data model (`pass.xcdatamodeld`), with attributes:
- `id: UUID` — unique identifier
- `name: String` — display name (e.g. "Personal", "Work")
- `gitURL: URL` (stored as String)
- `gitBranchName: String`
- `gitAuthenticationMethod: String` (raw value of `GitAuthenticationMethod`)
- `gitUsername: String`
- `pgpKeySource: String?` (raw value of `KeySource`)
- `isVisibleInPasswords: Bool` — shown in the password list
- `isVisibleInAutoFill: Bool` — shown in AutoFill
- `sortOrder: Int16` — for user-defined ordering
- `lastSyncedTime: Date?`
Relationship: `passwords` → to-many `PasswordEntity` (inverse: `store`; cascade delete rule — deleting a store removes all its password entities).
Using Core Data instead of a separate JSON file because:
- The Core Data stack already exists and is shared across all targets via the app group
- The `StoreConfiguration``PasswordEntity` relationship gives referential integrity and cascade deletes for free
- No second persistence mechanism to maintain
- Built-in concurrency/conflict handling
### 2.2 Define `StoreConfigurationManager`
→ Testing: [T1 — `StoreConfiguration` entity tests](#t1-storeconfiguration-entity-tests), [T3 — `PasswordStoreManager` tests](#t3-passwordstoremanager-tests)
Manages the list of stores via Core Data. Provides CRUD, reordering, and lookup by ID. Observable (via `NotificationCenter` or Combine) so UI updates when stores change.
### 2.3 Migration from single-store
→ Testing: [T2 — Migration tests](#t2-migration-tests)
On first launch after upgrade, create a single `StoreConfiguration` from the current `Defaults.*` values and keychain entries. Assign all existing `PasswordEntity` rows to this store. Existing users see no change.
This is a Core Data model version migration: add the `StoreConfiguration` entity, add the `store` relationship to `PasswordEntity`, and populate it in a post-migration step.
### 2.4 Per-store secrets
→ Testing: [T5 — Per-store keychain namespace tests](#t5-per-store-keychain-namespace-tests)
Per-store secrets go in the keychain with namespaced keys:
- `"{storeID}.gitPassword"`, `"{storeID}.gitSSHPrivateKeyPassphrase"`, `"{storeID}.sshPrivateKey"`
- `"{storeID}.pgpPublicKey"`, `"{storeID}.pgpPrivateKey"`
- The existing `"pgpKeyPassphrase-{keyID}"` scheme already works across stores since it's keyed by PGP key ID.
---
## Phase 3: De-singleton the Backend
The most invasive but essential change. Requires careful sequencing.
### 3.1 Parameterize `Globals` paths
Add a method to compute the per-store repository directory:
- `repositoryURL(for storeID: UUID) -> URL` — e.g. `Library/password-stores/{storeID}/`
The database path (`dbPath`) stays single since we use one Core Data database with a relationship.
### 3.2 Make `PasswordStore` non-singleton
→ Testing: [T3 — `PasswordStoreManager` tests](#t3-passwordstoremanager-tests), [T4 — Per-store `PasswordStore` tests](#t4-per-store-passwordstore-tests)
Convert to a class that takes a `StoreConfiguration` at init:
- Each instance owns its own `storeURL`, `gitRepository`, `context`
- Inject `StoreConfiguration` (for git URL, branch, credentials) and a `PGPAgent` instance
- Keep a **`PasswordStoreManager`** that holds all active `PasswordStore` instances (keyed by store ID), lazily creating them
- `PasswordStoreManager` replaces all `PasswordStore.shared` call sites
### 3.3 Core Data: `PasswordEntity``StoreConfiguration` relationship
→ Testing: [T1 — `StoreConfiguration` entity tests](#t1-storeconfiguration-entity-tests), [T6 — `PasswordEntity` fetch filtering tests](#t6-passwordentity-fetch-filtering-tests)
Add a `store` relationship (to-one) on `PasswordEntity` pointing to `StoreConfiguration` (inverse: `passwords`, to-many, cascade delete). This replaces the need for a separate `storeID` UUID attribute — the relationship provides referential integrity and cascade deletes.
All `PasswordEntity` fetch requests must be updated to filter by store (or by set of visible stores for the password list / AutoFill). The `initPasswordEntityCoreData(url:in:)` method already takes a URL parameter; pass the per-store URL and set the `store` relationship on each created entity.
### 3.4 Make `PGPAgent` per-store
→ Testing: [T4 — Per-store `PasswordStore` tests](#t4-per-store-passwordstore-tests) (encrypt/decrypt with per-store keys)
Remove the singleton. `PasswordStore` instances each hold an optional `PGPAgent`. Stores sharing the same PGP key pair just load the same keychain entries. Stores using different keys load different ones. The `KeyStore` protocol already supports this — just pass different key names.
### 3.5 Make `GitCredential` per-store
→ Testing: [T5 — Per-store keychain namespace tests](#t5-per-store-keychain-namespace-tests)
Already not a singleton, just reads from `Defaults`. Change it to read from `StoreConfiguration` + namespaced keychain keys instead.
---
## Phase 4: Settings UI — Store Management
### 4.1 New "Stores" settings section
Replace the current single "Password Repository" and "PGP Key" rows with a section listing all configured stores, plus an "Add Store" button:
- Each store row shows: name, git host, sync status indicator
- Tapping a store opens `StoreSettingsTableViewController`
- Swipe-to-delete removes a store (with confirmation)
- Drag-to-reorder for sort order
### 4.2 `StoreSettingsTableViewController`
Per-store settings screen:
- Store name (editable text field)
- **Repository section**: Git URL, branch, username, auth method (reuse existing `GitRepositorySettingsTableViewController` logic, but scoped to this store's config)
- **PGP Key section**: Same import options as today but scoped to this store's keychain namespace. Add an option "Use same key as [other store]" for convenience.
- **Visibility section**: Two toggles — "Show in Passwords", "Show in AutoFill"
- **Sync section**: Last synced time, manual sync button
- **Danger zone**: Delete store (see §4.4 for full cleanup steps)
### 4.3 Migrate existing settings screens
`GitRepositorySettingsTableViewController`, `PGPKeyArmorImportTableViewController`, etc. currently read/write global `Defaults`. Refactor them to accept a `StoreConfiguration` and read/write to that store's Core Data entity and namespaced keychain keys instead.
### 4.4 Store lifecycle: adding a store
→ Testing: [T7 — Store lifecycle integration tests](#t7-store-lifecycle-integration-tests)
Currently, configuring git settings triggers a clone immediately (`GitRepositorySettingsTableViewController.save()``cloneAndSegueIfSuccess()`), and the clone rebuilds Core Data from the filesystem. The multi-store equivalent:
1. User taps "Add Store" → presented with `StoreSettingsTableViewController`
2. User fills in store name, git URL, branch, username, auth method
3. User imports PGP keys (public + private) for this store
4. User taps "Save" → creates a `StoreConfiguration` entity in Core Data
5. Clone is triggered for this store:
- Compute per-store repo directory: `Library/password-stores/{storeID}/`
- Call `PasswordStore.cloneRepository()` scoped to that directory
- On success: BFS-walk the cloned repo, create `PasswordEntity` rows linked to this `StoreConfiguration` via the `store` relationship
- On success: validate `.gpg-id` exists (warn if missing, since decryption will fail)
- On failure: delete the `StoreConfiguration` entity (cascade deletes any partial `PasswordEntity` rows), clean up the repo directory, remove keychain entries for this store ID
6. Post `.passwordStoreUpdated` notification so the password list refreshes
### 4.5 Store lifecycle: removing a store
→ Testing: [T7 — Store lifecycle integration tests](#t7-store-lifecycle-integration-tests)
Currently `erase()` nukes everything globally. Per-store removal must be scoped:
1. User confirms deletion (destructive action sheet)
2. Cleanup steps:
- Delete the repo directory: `Library/password-stores/{storeID}/` (rm -rf)
- Delete `StoreConfiguration` entity from Core Data → cascade-deletes all linked `PasswordEntity` rows automatically
- Remove namespaced keychain entries: `"{storeID}.gitPassword"`, `"{storeID}.gitSSHPrivateKeyPassphrase"`, `"{storeID}.sshPrivateKey"`, `"{storeID}.pgpPublicKey"`, `"{storeID}.pgpPrivateKey"`
- Drop the in-memory `PasswordStore` instance from `PasswordStoreManager`
- Post `.passwordStoreUpdated` so the password list refreshes
3. PGP key passphrase entries (`"pgpKeyPassphrase-{keyID}"`) may be shared with other stores using the same key — only remove if no other store references that key ID
### 4.6 Store lifecycle: re-cloning / changing git URL
→ Testing: [T7 — Store lifecycle integration tests](#t7-store-lifecycle-integration-tests)
When the user changes the git URL or branch of an existing store (equivalent to today's "overwrite" flow):
1. Delete the existing repo directory for this store
2. Delete all `PasswordEntity` rows linked to this `StoreConfiguration` (but keep the `StoreConfiguration` entity itself)
3. Clone the new repo into the store's directory
4. Rebuild `PasswordEntity` rows from the new clone, linked to the same `StoreConfiguration`
5. Clear and re-prompt for git credentials
### 4.7 Global "Erase all data"
→ Testing: [T7 — Store lifecycle integration tests](#t7-store-lifecycle-integration-tests) (test global erase)
The existing "Erase Password Store Data" action in Advanced Settings should:
1. Delete all `StoreConfiguration` entities (cascade-deletes all `PasswordEntity` rows)
2. Delete all repo directories under `Library/password-stores/`
3. Remove all keychain entries (`AppKeychain.shared.removeAllContent()`)
4. Clear all UserDefaults (`Defaults.removeAll()`)
5. Clear passcode, uninit all PGP agents, drop all `PasswordStore` instances
6. Post `.passwordStoreErased`
---
## Phase 5: Password List UI — Multi-Store Browsing
### 5.1 Unified password list
`PasswordNavigationViewController` should show passwords from all visible stores together:
- **Folder mode**: Add a top-level grouping by store name, then the folder hierarchy within each store. The store name row could have a distinct style (e.g. bold, with a colored dot or icon).
- **Flat mode**: Show all passwords from all visible stores. Subtitle or accessory showing which store each password belongs to.
- **Search**: Searches across all visible stores simultaneously. Results annotated with store name.
### 5.2 Password detail
`PasswordDetailTableViewController` needs to know which store a password belongs to (to decrypt with the right `PGPAgent` and write changes back to the right repo). Pass the store context through from the list.
### 5.3 Add password flow
`AddPasswordTableViewController` needs a store picker if multiple stores are visible. Default to a "primary" store or the last-used one.
### 5.4 Sync
→ Testing: [T9 — Sync tests](#t9-sync-tests)
Pull-to-refresh in the password list syncs all visible stores (sequentially or in parallel). Show per-store sync status. Allow syncing individual stores from their settings or via long-press.
---
## Phase 6: AutoFill Extension
### 6.1 Multi-store AutoFill
→ Testing: [T8 — AutoFill multi-store tests](#t8-autofill-multi-store-tests)
`CredentialProviderViewController`:
- Fetch passwords from all stores where `isVisibleInAutoFill == true`
- The "Suggested" section should search across all AutoFill-visible stores
- Each password entry carries its store context for decryption
- No store picker needed — just include all enabled stores transparently
- Consider showing store name in the cell subtitle for disambiguation
### 6.2 QuickType integration
→ Testing: [T8 — AutoFill multi-store tests](#t8-autofill-multi-store-tests) (store ID in `recordIdentifier`)
`provideCredentialWithoutUserInteraction` needs to try the right store's PGP agent for decryption. Since it gets a `credentialIdentity` (which contains a `recordIdentifier` = password path), the path must now encode or be mappable to a store ID.
---
## Phase 7: Extensions & Shortcuts
### 7.1 passExtension (share extension)
Same multi-store search as AutoFill. Minor.
### 7.2 Shortcuts
`SyncRepositoryIntentHandler`:
- Add a store parameter to the intent (optional — if nil, sync all stores)
- Register each store as a Shortcut parameter option
- Support "Sync All" and "Sync [store name]"
---
## Phase 8: Multi-Recipient Encryption
See [02-multi-recipient-encryption-plan.md](02-multi-recipient-encryption-plan.md). This is standalone and can be implemented before or after multi-store support. In a multi-store context, `isEnableGPGIDOn` becomes a per-store setting.
---
## Implementation Order
| Step | Phase | Description | Depends On |
|------|-------|-------------|------------|
| 1 | 1 | Improve test coverage (see [separate plan](01-improve-test-coverage-plan.md)) | — |
| 2a | 2 | `StoreConfiguration` Core Data entity + relationship to `PasswordEntity` + model migration | Phase 1 |
| 2b | 2 | `StoreConfigurationManager` + single-store migration from existing Defaults/keychain | Step 2a |
| 2t | T | Tests: `StoreConfiguration` CRUD, cascade delete, migration (T1, T2) | Steps 2a+2b |
| 3a | 3 | Parameterize `Globals` paths (per-store repo directory) | Step 2a |
| 3b | 3 | Namespace keychain keys per store | Step 2a |
| 3bt | T | Tests: per-store keychain namespace (T5) | Step 3b |
| 3c | 3 | De-singleton `PGPAgent` | Steps 2a+3a+3b |
| 3d | 3 | De-singleton `PasswordStore``PasswordStoreManager` | Steps 2b-3c |
| 3dt | T | Tests: `PasswordStoreManager`, per-store `PasswordStore`, entity filtering (T3, T4, T6) | Step 3d |
| 3e | 3 | Per-store `GitCredential` | Steps 3b+3d |
| 3f | 3 | Store lifecycle: add/clone, remove/cleanup, re-clone, global erase | Steps 3d+3e |
| 3ft | T | Tests: store lifecycle integration (T7) | Step 3f |
| 4a | 4 | Store management UI (add/edit/delete/reorder) | Step 3f |
| 4b | 4 | Migrate existing settings screens to per-store | Step 4a |
| 5a | 5 | Multi-store password list | Step 3d |
| 5b | 5 | Multi-store add/edit/detail | Step 5a |
| 5c | 5 | Multi-store sync | Steps 3e+5a |
| 5ct | T | Tests: sync (T9) | Step 5c |
| 6a | 6 | Multi-store AutoFill | Step 3d |
| 6t | T | Tests: AutoFill multi-store (T8) | Step 6a |
| 7a | 7 | Multi-store Shortcuts | Step 3d |
| 8a | 8 | Multi-recipient encryption (see [separate plan](02-multi-recipient-encryption-plan.md)) | Step 3d |
---
## Testing Plan
For baseline test coverage of existing code, see [01-improve-test-coverage-plan.md](01-improve-test-coverage-plan.md).
### Testing new multi-store code
#### T1: `StoreConfiguration` entity tests
- **Test CRUD**: Create, read, update, delete `StoreConfiguration` entities.
- **Test cascade delete**: Delete a `StoreConfiguration` → verify all linked `PasswordEntity` rows are deleted.
- **Test relationship integrity**: Create `PasswordEntity` rows linked to a store → verify fetching by store returns the right entities.
- **Test `StoreConfigurationManager`**: Create, list, reorder, delete stores via the manager.
#### T2: Migration tests
- **Test fresh install**: No existing data → no `StoreConfiguration` created, app works.
- **Test upgrade migration from single-store**:
1. Set up a pre-migration Core Data database (using the old model version) with `PasswordEntity` rows, populate `Defaults` with git URL/branch/username, and populate keychain with PGP + SSH keys.
2. Run the migration.
3. Verify: one `StoreConfiguration` exists with values from Defaults, all `PasswordEntity` rows are linked to it, keychain entries are namespaced under the new store's ID.
- **Test idempotency**: Running migration twice doesn't create duplicate stores.
- **Test migration with empty repo** (no passwords, just settings): Still creates a `StoreConfiguration`.
#### T3: `PasswordStoreManager` tests
- **Test store lookup by ID**.
- **Test lazy instantiation**: Requesting a store creates `PasswordStore` on demand.
- **Test listing visible stores** (filtered by `isVisibleInPasswords` / `isVisibleInAutoFill`).
- **Test adding/removing stores updates the manager**.
#### T4: Per-store `PasswordStore` tests
- **Test clone scoped to per-store directory**: Clone into `Library/password-stores/{storeID}/`, verify `PasswordEntity` rows are linked to the right `StoreConfiguration`.
- **Test two stores independently**: Clone two different repos, verify each store's entities are separate, deleting one doesn't affect the other.
- **Test `eraseStoreData` scoped to one store**: Only that store's directory and entities are deleted.
- **Test encrypt/decrypt with per-store PGP keys**: Store A uses key pair X, store B uses key pair Y, each can only decrypt its own passwords.
- **Test store sharing one PGP key pair**: Two stores referencing the same keychain entries both decrypt correctly.
#### T5: Per-store keychain namespace tests
- **Test namespaced keys don't collide**: Store A's `"{A}.gitPassword"` and store B's `"{B}.gitPassword"` are independent.
- **Test `removeAllContent(withPrefix:)`**: Removing store A's keys doesn't affect store B's.
- **Test `pgpKeyPassphrase-{keyID}`** shared across stores using the same key.
#### T6: `PasswordEntity` fetch filtering tests
- **Test `fetchAll` filtered by one store**.
- **Test `fetchAll` filtered by multiple visible stores** (the AutoFill / password list scenario).
- **Test `fetchUnsynced` filtered by store**.
- **Test search across multiple stores**.
#### T7: Store lifecycle integration tests
- **Test add store flow**: Create config → clone → BFS walk → entities linked → notification posted.
- **Test remove store flow**: Delete config → cascade deletes entities → repo directory removed → keychain cleaned → notification posted.
- **Test re-clone flow**: Change git URL → old entities deleted → new clone → new entities → same `StoreConfiguration`.
- **Test global erase**: Multiple stores → all gone.
- **Test clone failure cleanup**: Clone fails → `StoreConfiguration` deleted → no orphan entities or directories.
#### T8: AutoFill multi-store tests
- **Test credential listing from multiple stores**: Entries from all AutoFill-visible stores appear.
- **Test store ID encoded in `recordIdentifier`**: Can map a credential identity back to the correct store for decryption.
- **Test filtering**: Only `isVisibleInAutoFill == true` stores appear.
#### T9: Sync tests
- **Test pull updates one store's entities without affecting others**.
- **Test sync-all triggers pull for each visible store**.
### Test infrastructure additions needed
- **Multi-store `CoreDataTestCase`**: Extend `CoreDataTestCase` to support the new model version with `StoreConfiguration`. Provide a helper to create a `StoreConfiguration` + linked entities in one call.
- **Pre-migration database fixture**: A snapshot of the old Core Data model (without `StoreConfiguration`) to use in migration tests. Can be a `.sqlite` file committed to the test bundle.
---
## Risks & Considerations
- **Data migration**: Existing users must be migrated seamlessly. The migration (steps 2a-2b) should be idempotent and tested thoroughly.
- **Core Data migration**: Adding the `StoreConfiguration` entity and the `store` relationship on `PasswordEntity` requires a lightweight migration (new entity + new optional relationship). The post-migration step creates a default `StoreConfiguration` from existing Defaults and assigns all existing `PasswordEntity` rows to it.
- **Memory**: Multiple `PasswordStore` instances each holding a `GTRepository` and `PGPAgent` — lazy instantiation is important. Only active/visible stores should be loaded.
- **Concurrency**: Git operations (pull/push) across multiple stores should not block each other. Use per-store serial queues.
- **AutoFill performance**: The extension has strict memory limits (~30MB). Loading all stores' Core Data is fine (single DB), but loading multiple PGP agents may be expensive. Decrypt lazily, only when the user selects a password.
- **Backward compatibility**: Older versions won't understand the new data layout. Consider a one-way migration flag.
---
## Context
### Prompt
I want to add support for several separate password repositories, each with a unique repository connection (URL, authentication), and potentially separate encryption/decryption keys.
Another GUI app that supports this is QtPass. There is information about this in its readme: https://raw.githubusercontent.com/IJHack/QtPass/refs/heads/main/README.md
It calls it "profiles". I would probably call it "stores".
I want to be able to configure which stores are enabled when I view the list, and separately also for the autofill feature.
It should be possible to share a store with another user (who would be using a separate key on their end).
Make a plan for what needs to be done to support this in this application.
### Key Architecture Facts
- `PasswordStore.shared` singleton referenced from ~20+ call sites (app, AutoFill, passExtension, Shortcuts)
- `PGPAgent.shared` singleton holds single key pair
- `Globals` has all paths as `static let` (single repo, single DB, single key paths)
- `DefaultsKeys` — all git/PGP settings single-valued in shared UserDefaults
- `AppKeychain.shared` — flat keys, no per-store namespace
- Core Data: single `PasswordEntity` entity, no store discriminator, single SQLite DB
- `PersistenceController.shared` — single NSPersistentContainer
- UI: UITabBarController with 2 tabs (Passwords, Settings). Passwords tab uses PasswordNavigationViewController
- AutoFill: CredentialProviderViewController uses PasswordStore.shared directly
- App group + keychain group shared across all targets
- `.gpg-id` per-directory key selection already exists (closest to multi-key concept)
- QtPass calls them "profiles" — each can have different git repo and GPG key
### User Requirements
- Multiple password stores, each with unique repo connection (URL, auth) and potentially separate PGP keys
- Call them "stores" (not profiles)
- Configure which stores are visible in password list vs AutoFill separately
- Support sharing a store with another user (who uses a different key)