From 7a154ed8ec620897d1ebed7c07526cb6a977362e Mon Sep 17 00:00:00 2001 From: David Lawrence Date: Tue, 28 Jun 2016 15:36:54 -0700 Subject: [PATCH 1/3] tear out import/export Signed-off-by: David Lawrence (github: endophage) --- cmd/notary/integration_test.go | 304 ----------- cmd/notary/keys.go | 256 +-------- cmd/notary/keys_test.go | 87 ---- cmd/notary/main_test.go | 4 - cryptoservice/crypto_service.go | 28 + cryptoservice/import_export.go | 313 ----------- .../import_export_compatibility_test.go | 164 ------ cryptoservice/import_export_test.go | 491 ------------------ signer/keydbstore/keydbstore.go | 6 - signer/keydbstore/rethink_keydbstore.go | 6 - trustmanager/keyfilestore.go | 21 - trustmanager/keyfilestore_test.go | 72 --- trustmanager/keystore.go | 1 - trustmanager/yubikey/yubikeystore.go | 6 - trustmanager/yubikey/yubikeystore_test.go | 25 - 15 files changed, 30 insertions(+), 1754 deletions(-) delete mode 100644 cryptoservice/import_export.go delete mode 100644 cryptoservice/import_export_compatibility_test.go delete mode 100644 cryptoservice/import_export_test.go diff --git a/cmd/notary/integration_test.go b/cmd/notary/integration_test.go index 1ffb1097e..2c8841f3d 100644 --- a/cmd/notary/integration_test.go +++ b/cmd/notary/integration_test.go @@ -929,21 +929,6 @@ func TestClientDelegationsPublishing(t *testing.T) { _, err = runCommand(t, tempDir, "-s", server.URL, "publish", "gun") require.NoError(t, err) - // Now remove this key, and make a new file to import the delegation's key from - require.NoError(t, os.Remove(filepath.Join(keyDir, canonicalKeyID+".key"))) - tempPrivFile, err := ioutil.TempFile("/tmp", "privfile") - require.NoError(t, err) - defer os.Remove(tempPrivFile.Name()) - - // Write the private key to a file so we can import it - _, err = tempPrivFile.Write(privKeyBytesNoRole) - require.NoError(t, err) - tempPrivFile.Close() - - // Import the private key, associating it with our delegation role - _, err = runCommand(t, tempDir, "key", 
"import", tempPrivFile.Name(), "--role", "targets/releases") - require.NoError(t, err) - // add a target using the delegation -- will only add to targets/releases _, err = runCommand(t, tempDir, "add", "gun", target, tempTargetFile.Name(), "--roles", "targets/releases") require.NoError(t, err) @@ -1133,208 +1118,6 @@ func TestClientKeyGenerationRotation(t *testing.T) { require.True(t, strings.Contains(string(output), target)) } -// Tests backup/restore root+signing keys - repo with restored keys should be -// able to publish successfully -func TestClientKeyBackupAndRestore(t *testing.T) { - // -- setup -- - setUp(t) - - dirs := make([]string, 3) - for i := 0; i < 3; i++ { - tempDir := tempDirWithConfig(t, "{}") - defer os.RemoveAll(tempDir) - dirs[i] = tempDir - } - - tempfiles := make([]string, 2) - for i := 0; i < 2; i++ { - tempFile, err := ioutil.TempFile("", "tempfile") - require.NoError(t, err) - tempFile.Close() - tempfiles[i] = tempFile.Name() - defer os.Remove(tempFile.Name()) - } - - server := setupServer() - defer server.Close() - - var ( - target = "sdgkadga" - err error - ) - - // create two repos and publish a target - for _, gun := range []string{"gun1", "gun2"} { - _, err = runCommand(t, dirs[0], "-s", server.URL, "init", gun) - require.NoError(t, err) - - assertSuccessfullyPublish( - t, dirs[0], server.URL, gun, target, tempfiles[0]) - } - assertNumKeys(t, dirs[0], 1, 4, true) - - // -- tests -- - zipfile := tempfiles[0] + ".zip" - defer os.Remove(zipfile) - - // backup then restore all keys - _, err = runCommand(t, dirs[0], "key", "backup", zipfile) - require.NoError(t, err) - - _, err = runCommand(t, dirs[1], "key", "restore", zipfile) - require.NoError(t, err) - // all keys should be there, including root because the root key was backed up to disk, - // and export just backs up all the keys on disk - assertNumKeys(t, dirs[1], 1, 4, !rootOnHardware()) - - // can list and publish to both repos using restored keys - for _, gun := range 
[]string{"gun1", "gun2"} { - output, err := runCommand(t, dirs[1], "-s", server.URL, "list", gun) - require.NoError(t, err) - require.True(t, strings.Contains(string(output), target)) - - assertSuccessfullyPublish( - t, dirs[1], server.URL, gun, target+"2", tempfiles[1]) - } - - // backup and restore keys for one gun - _, err = runCommand(t, dirs[0], "key", "backup", zipfile, "-g", "gun1") - require.NoError(t, err) - - _, err = runCommand(t, dirs[2], "key", "restore", zipfile) - require.NoError(t, err) - - // this function is declared is in the build-tagged setup files - if rootOnHardware() { - // hardware root is still present, and the key will ONLY be on hardware - // and not on disk - assertNumKeys(t, dirs[2], 1, 2, false) - } else { - // only 2 signing keys should be there, and no root key - assertNumKeys(t, dirs[2], 0, 2, true) - } -} - -// Generate a root key and export the root key only. Return the key ID -// exported. -func exportRoot(t *testing.T, exportTo string) string { - tempDir := tempDirWithConfig(t, "{}") - defer os.RemoveAll(tempDir) - - // generate root key produces a single root key and no other keys - _, err := runCommand(t, tempDir, "key", "generate", data.ECDSAKey) - require.NoError(t, err) - oldRoot, _ := assertNumKeys(t, tempDir, 1, 0, true) - - // export does not require a password - oldNewCommand := NewNotaryCommand - NewNotaryCommand = func() *cobra.Command { - commander := ¬aryCommander{getRetriever: func() notary.PassRetriever { return nil }} - return commander.GetCommand() - } - defer func() { // but import will, later - NewNotaryCommand = oldNewCommand - }() - - _, err = runCommand( - t, tempDir, "key", "export", oldRoot[0], exportTo) - require.NoError(t, err) - - return oldRoot[0] -} - -// Tests import/export root key only -func TestClientKeyImportExportRootOnly(t *testing.T) { - // -- setup -- - setUp(t) - - tempDir := tempDirWithConfig(t, "{}") - defer os.RemoveAll(tempDir) - - server := setupServer() - defer server.Close() - - var 
( - target = "sdgkadga" - rootKeyID string - ) - - tempFile, err := ioutil.TempFile("", "pemfile") - require.NoError(t, err) - // close later, because we might need to write to it - defer os.Remove(tempFile.Name()) - - // -- tests -- - - if rootOnHardware() { - t.Log("Cannot export a key from hardware. Will generate one to import.") - - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) - require.NoError(t, err) - - pemBytes, err := trustmanager.EncryptPrivateKey(privKey, "root", testPassphrase) - require.NoError(t, err) - - nBytes, err := tempFile.Write(pemBytes) - require.NoError(t, err) - tempFile.Close() - require.Equal(t, len(pemBytes), nBytes) - rootKeyID = privKey.ID() - } else { - tempFile.Close() - rootKeyID = exportRoot(t, tempFile.Name()) - } - - // import the key - _, err = runCommand(t, tempDir, "key", "import", tempFile.Name()) - require.NoError(t, err) - - // if there is hardware available, root will only be on hardware, and not - // on disk - newRoot, _ := assertNumKeys(t, tempDir, 1, 0, !rootOnHardware()) - require.Equal(t, rootKeyID, newRoot[0]) - - // Just to make sure, init a repo and publish - _, err = runCommand(t, tempDir, "-s", server.URL, "init", "gun") - require.NoError(t, err) - assertNumKeys(t, tempDir, 1, 2, !rootOnHardware()) - assertSuccessfullyPublish( - t, tempDir, server.URL, "gun", target, tempFile.Name()) - - // Now assert that bad root keys give an error - // Try importing an unencrypted root key: - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) - require.NoError(t, err) - decryptedPEMBytes, err := trustmanager.KeyToPEM(privKey, data.CanonicalRootRole) - decryptedKeyFile, err := ioutil.TempFile("", "decryptedPem") - require.NoError(t, err) - // close later, because we might need to write to it - defer os.Remove(decryptedKeyFile.Name()) - - nBytes, err := decryptedKeyFile.Write(decryptedPEMBytes) - require.NoError(t, err) - decryptedKeyFile.Close() - require.Equal(t, len(decryptedPEMBytes), nBytes) - // import 
the key - _, err = runCommand(t, tempDir, "key", "import", decryptedKeyFile.Name()) - require.Error(t, err) - - // Now try importing an invalid PEM as a root key - invalidPEMBytes := []byte("this is not PEM") - invalidPEMFile, err := ioutil.TempFile("", "invalidPem") - require.NoError(t, err) - // close later, because we might need to write to it - defer os.Remove(invalidPEMFile.Name()) - - nBytes, err = invalidPEMFile.Write(invalidPEMBytes) - require.NoError(t, err) - invalidPEMFile.Close() - require.Equal(t, len(invalidPEMBytes), nBytes) - // import the key - _, err = runCommand(t, tempDir, "key", "import", invalidPEMFile.Name()) - require.Error(t, err) -} - // Helper method to get the subdirectory for TUF keys func getKeySubdir(role, gun string) string { subdir := notary.PrivDir @@ -1350,93 +1133,6 @@ func getKeySubdir(role, gun string) string { } } -// Tests importing and exporting keys for all different roles and GUNs -func TestClientKeyImportExportAllRoles(t *testing.T) { - if rootOnHardware() { - t.Log("Cannot import or export a non-root key from hardware. 
Will skip test.") - return - } - // -- setup -- - setUp(t) - - tempDir := tempDirWithConfig(t, "{}") - defer os.RemoveAll(tempDir) - - server := setupServer() - defer server.Close() - - // -- tests -- - _, err := runCommand(t, tempDir, "-s", server.URL, "init", "gun") - require.NoError(t, err) - - testRoles := append(data.BaseRoles, "targets/releases") - // Test importing and exporting keys to all base roles and delegation role - for _, role := range testRoles { - // Do this while importing keys that have the PEM header role set or have --role set on import - for _, setKeyRole := range []bool{true, false} { - // Make a new key for this role - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) - require.NoError(t, err) - - // Make a tempfile for importing - tempFile, err := ioutil.TempFile("", "pemfile") - require.NoError(t, err) - - // Specify the role in the PEM header - pemBytes, err := trustmanager.EncryptPrivateKey(privKey, role, testPassphrase) - require.NoError(t, err) - ioutil.WriteFile(tempFile.Name(), pemBytes, 0644) - - // If we need to set the key role with the --role flag, do so on import - if setKeyRole { - // If it's targets/snapshot we must specify the GUN - if role == data.CanonicalTargetsRole || role == data.CanonicalSnapshotRole { - _, err = runCommand(t, tempDir, "key", "import", tempFile.Name(), "--gun", "gun", "--role", role) - } else { - _, err = runCommand(t, tempDir, "key", "import", tempFile.Name(), "--role", role) - } - } else { - // If it's targets/snapshot we must specify the GUN - if role == data.CanonicalTargetsRole || role == data.CanonicalSnapshotRole { - _, err = runCommand(t, tempDir, "key", "import", tempFile.Name(), "--gun", "gun") - } else { - _, err = runCommand(t, tempDir, "key", "import", tempFile.Name()) - } - } - require.NoError(t, err) - - // Test that we imported correctly - keySubdir := getKeySubdir(role, "gun") - _, err = os.Stat(filepath.Join(tempDir, keySubdir, privKey.ID()+".key")) - require.Nil(t, err) - - 
// Remove the input file so we can test exporting - require.NoError(t, os.Remove(tempFile.Name())) - - // Make a tempfile for exporting to - tempFile, err = ioutil.TempFile("", "pemfile") - require.NoError(t, err) - - // Ensure exporting this key by ID gets the same key - _, err = runCommand(t, tempDir, "key", "export", privKey.ID(), tempFile.Name()) - require.NoError(t, err) - // Compare the bytes of the exported file and the root key file in the repo - exportedBytes, err := ioutil.ReadFile(tempFile.Name()) - require.NoError(t, err) - repoBytes, err := ioutil.ReadFile(filepath.Join(tempDir, keySubdir, privKey.ID()+".key")) - require.NoError(t, err) - require.Equal(t, repoBytes, exportedBytes) - - // Ensure exporting this key and changing the passphrase works - _, err = runCommand(t, tempDir, "key", "export", privKey.ID(), tempFile.Name(), "-p") - require.NoError(t, err) - - // Remove the export file for cleanup - require.NoError(t, os.Remove(tempFile.Name())) - } - } -} - // Tests default root key generation func TestDefaultRootKeyGeneration(t *testing.T) { // -- setup -- diff --git a/cmd/notary/keys.go b/cmd/notary/keys.go index d98bbc0e0..6d17a26c6 100644 --- a/cmd/notary/keys.go +++ b/cmd/notary/keys.go @@ -1,11 +1,8 @@ package main import ( - "archive/zip" "fmt" "io" - "io/ioutil" - "os" "path/filepath" "strconv" "strings" @@ -44,30 +41,6 @@ var cmdKeyGenerateRootKeyTemplate = usageTemplate{ Long: "Generates a new root key with a given algorithm. If hardware key storage (e.g. a Yubikey) is available, the key will be stored both on hardware and on disk (so that it can be backed up). Please make sure to back up and then remove this on-key disk immediately afterwards.", } -var cmdKeysBackupTemplate = usageTemplate{ - Use: "backup [ zipfilename ]", - Short: "Backs up all your on-disk keys to a ZIP file.", - Long: "Backs up all of your accessible of keys. The keys are reencrypted with a new passphrase. The output is a ZIP file. 
If the --gun option is passed, only signing keys and no root keys will be backed up. Does not work on keys that are only in hardware (e.g. Yubikeys).", -} - -var cmdKeyExportTemplate = usageTemplate{ - Use: "export [ keyID ] [ pemfilename ]", - Short: "Export a private key on disk to a PEM file.", - Long: "Exports a single private key on disk, without reencrypting. The output is a PEM file. Does not work on keys that are only in hardware (e.g. Yubikeys).", -} - -var cmdKeysRestoreTemplate = usageTemplate{ - Use: "restore [ zipfilename ]", - Short: "Restore multiple keys from a ZIP file.", - Long: "Restores one or more keys from a ZIP file. If hardware key storage (e.g. a Yubikey) is available, root keys will be imported into the hardware, but not backed up to disk in the same location as the other, non-root keys.", -} - -var cmdKeyImportTemplate = usageTemplate{ - Use: "import [ pemfilename ]", - Short: "Imports a key from a PEM file.", - Long: "Imports a single key from a PEM file. If a hardware key storage (e.g. 
Yubikey) is available, the root key will be imported into the hardware but not backed up on disk again.", -} - var cmdKeyRemoveTemplate = usageTemplate{ Use: "remove [ keyID ]", Short: "Removes the key with the given keyID.", @@ -86,12 +59,8 @@ type keyCommander struct { getRetriever func() notary.PassRetriever // these are for command line parsing - no need to set - keysExportChangePassphrase bool - keysExportGUN string - keysImportGUN string - keysImportRole string - rotateKeyRole string - rotateKeyServerManaged bool + rotateKeyRole string + rotateKeyServerManaged bool input io.Reader } @@ -100,28 +69,8 @@ func (k *keyCommander) GetCommand() *cobra.Command { cmd := cmdKeyTemplate.ToCommand(nil) cmd.AddCommand(cmdKeyListTemplate.ToCommand(k.keysList)) cmd.AddCommand(cmdKeyGenerateRootKeyTemplate.ToCommand(k.keysGenerateRootKey)) - cmd.AddCommand(cmdKeysRestoreTemplate.ToCommand(k.keysRestore)) - cmdKeysImport := cmdKeyImportTemplate.ToCommand(k.keysImport) - cmdKeysImport.Flags().StringVarP( - &k.keysImportGUN, "gun", "g", "", "Globally Unique Name to import key to") - cmdKeysImport.Flags().StringVarP( - &k.keysImportRole, "role", "r", "", "Role to import key to (if not in PEM headers)") - cmd.AddCommand(cmdKeysImport) - cmd.AddCommand(cmdKeyRemoveTemplate.ToCommand(k.keyRemove)) cmd.AddCommand(cmdKeyPasswdTemplate.ToCommand(k.keyPassphraseChange)) - - cmdKeysBackup := cmdKeysBackupTemplate.ToCommand(k.keysBackup) - cmdKeysBackup.Flags().StringVarP( - &k.keysExportGUN, "gun", "g", "", "Globally Unique Name to export keys for") - cmd.AddCommand(cmdKeysBackup) - - cmdKeyExport := cmdKeyExportTemplate.ToCommand(k.keysExport) - cmdKeyExport.Flags().BoolVarP( - &k.keysExportChangePassphrase, "change-passphrase", "p", false, - "Set a new passphrase for the key being exported") - cmd.AddCommand(cmdKeyExport) - cmdRotateKey := cmdRotateKeyTemplate.ToCommand(k.keysRotate) cmdRotateKey.Flags().BoolVarP(&k.rotateKeyServerManaged, "server-managed", "r", false, "Signing and 
key management will be handled by the remote server "+ @@ -198,207 +147,6 @@ func (k *keyCommander) keysGenerateRootKey(cmd *cobra.Command, args []string) er return nil } -// keysBackup exports a collection of keys to a ZIP file -func (k *keyCommander) keysBackup(cmd *cobra.Command, args []string) error { - if len(args) < 1 { - cmd.Usage() - return fmt.Errorf("Must specify output filename for export") - } - - config, err := k.configGetter() - if err != nil { - return err - } - ks, err := k.getKeyStores(config, false, false) - if err != nil { - return err - } - exportFilename := args[0] - - cs := cryptoservice.NewCryptoService(ks...) - - exportFile, err := os.Create(exportFilename) - if err != nil { - return fmt.Errorf("Error creating output file: %v", err) - } - - // Must use a different passphrase retriever to avoid caching the - // unlocking passphrase and reusing that. - exportRetriever := k.getRetriever() - if k.keysExportGUN != "" { - err = cs.ExportKeysByGUN(exportFile, k.keysExportGUN, exportRetriever) - } else { - err = cs.ExportAllKeys(exportFile, exportRetriever) - } - - exportFile.Close() - - if err != nil { - os.Remove(exportFilename) - return fmt.Errorf("Error exporting keys: %v", err) - } - return nil -} - -// keysExport exports a key by ID to a PEM file -func (k *keyCommander) keysExport(cmd *cobra.Command, args []string) error { - if len(args) < 2 { - cmd.Usage() - return fmt.Errorf("Must specify key ID and output filename for export") - } - - keyID := args[0] - exportFilename := args[1] - - if len(keyID) != notary.Sha256HexSize { - return fmt.Errorf("Please specify a valid key ID") - } - - config, err := k.configGetter() - if err != nil { - return err - } - ks, err := k.getKeyStores(config, true, false) - if err != nil { - return err - } - - cs := cryptoservice.NewCryptoService(ks...) 
- keyInfo, err := cs.GetKeyInfo(keyID) - if err != nil { - return fmt.Errorf("Could not retrieve info for key %s", keyID) - } - - exportFile, err := os.Create(exportFilename) - if err != nil { - return fmt.Errorf("Error creating output file: %v", err) - } - if k.keysExportChangePassphrase { - // Must use a different passphrase retriever to avoid caching the - // unlocking passphrase and reusing that. - exportRetriever := k.getRetriever() - err = cs.ExportKeyReencrypt(exportFile, keyID, exportRetriever) - } else { - err = cs.ExportKey(exportFile, keyID, keyInfo.Role) - } - exportFile.Close() - if err != nil { - os.Remove(exportFilename) - return fmt.Errorf("Error exporting %s key: %v", keyInfo.Role, err) - } - return nil -} - -// keysRestore imports keys from a ZIP file -func (k *keyCommander) keysRestore(cmd *cobra.Command, args []string) error { - if len(args) < 1 { - cmd.Usage() - return fmt.Errorf("Must specify input filename for import") - } - - importFilename := args[0] - - config, err := k.configGetter() - if err != nil { - return err - } - ks, err := k.getKeyStores(config, true, false) - if err != nil { - return err - } - cs := cryptoservice.NewCryptoService(ks...) 
- - zipReader, err := zip.OpenReader(importFilename) - if err != nil { - return fmt.Errorf("Opening file for import: %v", err) - } - defer zipReader.Close() - - err = cs.ImportKeysZip(zipReader.Reader, k.getRetriever()) - - if err != nil { - return fmt.Errorf("Error importing keys: %v", err) - } - return nil -} - -// keysImport imports a private key from a PEM file for a role -func (k *keyCommander) keysImport(cmd *cobra.Command, args []string) error { - if len(args) != 1 { - cmd.Usage() - return fmt.Errorf("Must specify input filename for import") - } - - config, err := k.configGetter() - if err != nil { - return err - } - ks, err := k.getKeyStores(config, true, false) - if err != nil { - return err - } - - importFilename := args[0] - - importFile, err := os.Open(importFilename) - if err != nil { - return fmt.Errorf("Opening file for import: %v", err) - } - defer importFile.Close() - - pemBytes, err := ioutil.ReadAll(importFile) - if err != nil { - return fmt.Errorf("Error reading input file: %v", err) - } - - pemRole := trustmanager.ReadRoleFromPEM(pemBytes) - - // If the PEM key doesn't have a role in it, we must have --role set - if pemRole == "" && k.keysImportRole == "" { - return fmt.Errorf("Could not infer role, and no role was specified for key") - } - - // If both PEM role and a --role are provided and they don't match, error - if pemRole != "" && k.keysImportRole != "" && pemRole != k.keysImportRole { - return fmt.Errorf("Specified role %s does not match role %s in PEM headers", k.keysImportRole, pemRole) - } - - // Determine which role to add to between PEM headers and --role flag: - var importRole string - if k.keysImportRole != "" { - importRole = k.keysImportRole - } else { - importRole = pemRole - } - - // If we're importing to targets or snapshot, we need a GUN - if (importRole == data.CanonicalTargetsRole || importRole == data.CanonicalSnapshotRole) && k.keysImportGUN == "" { - return fmt.Errorf("Must specify GUN for %s key", importRole) - } - - 
// Root keys must be encrypted - if importRole == data.CanonicalRootRole { - if err = cryptoservice.CheckRootKeyIsEncrypted(pemBytes); err != nil { - return err - } - } - - cs := cryptoservice.NewCryptoService(ks...) - // Convert to a data.PrivateKey, potentially decrypting the key - privKey, err := trustmanager.ParsePEMPrivateKey(pemBytes, "") - if err != nil { - privKey, _, err = trustmanager.GetPasswdDecryptBytes(k.getRetriever(), pemBytes, "", "imported "+importRole) - if err != nil { - return err - } - } - err = cs.AddKey(importRole, k.keysImportGUN, privKey) - if err != nil { - return fmt.Errorf("Error importing key: %v", err) - } - return nil -} - func (k *keyCommander) keysRotate(cmd *cobra.Command, args []string) error { if len(args) < 2 { cmd.Usage() diff --git a/cmd/notary/keys_test.go b/cmd/notary/keys_test.go index bc4eae7a0..44cf65dbc 100644 --- a/cmd/notary/keys_test.go +++ b/cmd/notary/keys_test.go @@ -533,93 +533,6 @@ func TestChangeKeyPassphraseNonexistentID(t *testing.T) { require.Contains(t, err.Error(), "could not retrieve local key for key ID provided") } -func TestKeyImportMismatchingRoles(t *testing.T) { - setUp(t) - k := &keyCommander{ - configGetter: func() (*viper.Viper, error) { return viper.New(), nil }, - getRetriever: func() notary.PassRetriever { return passphrase.ConstantRetriever("pass") }, - keysImportRole: "targets", - } - tempFileName := generateTempTestKeyFile(t, "snapshot") - defer os.Remove(tempFileName) - - err := k.keysImport(&cobra.Command{}, []string{tempFileName}) - require.Error(t, err) - require.Contains(t, err.Error(), "does not match role") -} - -func TestKeyImportNoGUNForTargetsPEM(t *testing.T) { - setUp(t) - k := &keyCommander{ - configGetter: func() (*viper.Viper, error) { return viper.New(), nil }, - getRetriever: func() notary.PassRetriever { return passphrase.ConstantRetriever("pass") }, - } - tempFileName := generateTempTestKeyFile(t, "targets") - defer os.Remove(tempFileName) - - err := 
k.keysImport(&cobra.Command{}, []string{tempFileName}) - require.Error(t, err) - require.Contains(t, err.Error(), "Must specify GUN") -} - -func TestKeyImportNoGUNForSnapshotPEM(t *testing.T) { - setUp(t) - k := &keyCommander{ - configGetter: func() (*viper.Viper, error) { return viper.New(), nil }, - getRetriever: func() notary.PassRetriever { return passphrase.ConstantRetriever("pass") }, - } - tempFileName := generateTempTestKeyFile(t, "snapshot") - defer os.Remove(tempFileName) - - err := k.keysImport(&cobra.Command{}, []string{tempFileName}) - require.Error(t, err) - require.Contains(t, err.Error(), "Must specify GUN") -} - -func TestKeyImportNoGUNForTargetsFlag(t *testing.T) { - setUp(t) - k := &keyCommander{ - configGetter: func() (*viper.Viper, error) { return viper.New(), nil }, - getRetriever: func() notary.PassRetriever { return passphrase.ConstantRetriever("pass") }, - keysImportRole: "targets", - } - tempFileName := generateTempTestKeyFile(t, "") - defer os.Remove(tempFileName) - - err := k.keysImport(&cobra.Command{}, []string{tempFileName}) - require.Error(t, err) - require.Contains(t, err.Error(), "Must specify GUN") -} - -func TestKeyImportNoGUNForSnapshotFlag(t *testing.T) { - setUp(t) - k := &keyCommander{ - configGetter: func() (*viper.Viper, error) { return viper.New(), nil }, - getRetriever: func() notary.PassRetriever { return passphrase.ConstantRetriever("pass") }, - keysImportRole: "snapshot", - } - tempFileName := generateTempTestKeyFile(t, "") - defer os.Remove(tempFileName) - - err := k.keysImport(&cobra.Command{}, []string{tempFileName}) - require.Error(t, err) - require.Contains(t, err.Error(), "Must specify GUN") -} - -func TestKeyImportNoRole(t *testing.T) { - setUp(t) - k := &keyCommander{ - configGetter: func() (*viper.Viper, error) { return viper.New(), nil }, - getRetriever: func() notary.PassRetriever { return passphrase.ConstantRetriever("pass") }, - } - tempFileName := generateTempTestKeyFile(t, "") - defer 
os.Remove(tempFileName) - - err := k.keysImport(&cobra.Command{}, []string{tempFileName}) - require.Error(t, err) - require.Contains(t, err.Error(), "Could not infer role, and no role was specified for key") -} - func generateTempTestKeyFile(t *testing.T, role string) string { setUp(t) privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) diff --git a/cmd/notary/main_test.go b/cmd/notary/main_test.go index f0bcc1eb3..40e3793b5 100644 --- a/cmd/notary/main_test.go +++ b/cmd/notary/main_test.go @@ -159,10 +159,6 @@ var exampleValidCommands = []string{ "key list", "key rotate repo snapshot", "key generate rsa", - "key backup tempfile.zip", - "key export e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 backup.pem", - "key restore tempfile.zip", - "key import backup.pem", "key remove e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "key passwd e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "delegation list repo", diff --git a/cryptoservice/crypto_service.go b/cryptoservice/crypto_service.go index 7c32ba720..f86aee23c 100644 --- a/cryptoservice/crypto_service.go +++ b/cryptoservice/crypto_service.go @@ -4,6 +4,9 @@ import ( "crypto/rand" "fmt" + "crypto/x509" + "encoding/pem" + "errors" "github.com/Sirupsen/logrus" "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" @@ -13,6 +16,16 @@ const ( rsaKeySize = 2048 // Used for snapshots and targets keys ) +var ( + // ErrNoValidPrivateKey is returned if a key being imported doesn't + // look like a private key + ErrNoValidPrivateKey = errors.New("no valid private key found") + + // ErrRootKeyNotEncrypted is returned if a root key being imported is + // unencrypted + ErrRootKeyNotEncrypted = errors.New("only encrypted root keys may be imported") +) + // CryptoService implements Sign and Create, holding a specific GUN and keystore to // operate on type CryptoService struct { @@ -153,3 +166,18 @@ func (cs *CryptoService) ListAllKeys() 
map[string]string { } return res } + +// CheckRootKeyIsEncrypted makes sure the root key is encrypted. We have +// internal assumptions that depend on this. +func CheckRootKeyIsEncrypted(pemBytes []byte) error { + block, _ := pem.Decode(pemBytes) + if block == nil { + return ErrNoValidPrivateKey + } + + if !x509.IsEncryptedPEMBlock(block) { + return ErrRootKeyNotEncrypted + } + + return nil +} diff --git a/cryptoservice/import_export.go b/cryptoservice/import_export.go deleted file mode 100644 index f2d2c7438..000000000 --- a/cryptoservice/import_export.go +++ /dev/null @@ -1,313 +0,0 @@ -package cryptoservice - -import ( - "archive/zip" - "crypto/x509" - "encoding/pem" - "errors" - "io" - "io/ioutil" - "os" - "path/filepath" - "strings" - - "github.com/docker/notary" - "github.com/docker/notary/trustmanager" -) - -const zipMadeByUNIX = 3 << 8 - -var ( - // ErrNoValidPrivateKey is returned if a key being imported doesn't - // look like a private key - ErrNoValidPrivateKey = errors.New("no valid private key found") - - // ErrRootKeyNotEncrypted is returned if a root key being imported is - // unencrypted - ErrRootKeyNotEncrypted = errors.New("only encrypted root keys may be imported") - - // ErrNoKeysFoundForGUN is returned if no keys are found for the - // specified GUN during export - ErrNoKeysFoundForGUN = errors.New("no keys found for specified GUN") -) - -// ExportKey exports the specified private key to an io.Writer in PEM format. -// The key's existing encryption is preserved. 
-func (cs *CryptoService) ExportKey(dest io.Writer, keyID, role string) error { - var ( - pemBytes []byte - err error - ) - - for _, ks := range cs.keyStores { - pemBytes, err = ks.ExportKey(keyID) - if err != nil { - continue - } - } - if err != nil { - return err - } - - nBytes, err := dest.Write(pemBytes) - if err != nil { - return err - } - if nBytes != len(pemBytes) { - return errors.New("Unable to finish writing exported key.") - } - return nil -} - -// ExportKeyReencrypt exports the specified private key to an io.Writer in -// PEM format. The key is reencrypted with a new passphrase. -func (cs *CryptoService) ExportKeyReencrypt(dest io.Writer, keyID string, newPassphraseRetriever notary.PassRetriever) error { - privateKey, _, err := cs.GetPrivateKey(keyID) - if err != nil { - return err - } - - keyInfo, err := cs.GetKeyInfo(keyID) - if err != nil { - return err - } - - // Create temporary keystore to use as a staging area - tempBaseDir, err := ioutil.TempDir("", "notary-key-export-") - defer os.RemoveAll(tempBaseDir) - - tempKeyStore, err := trustmanager.NewKeyFileStore(tempBaseDir, newPassphraseRetriever) - if err != nil { - return err - } - - err = tempKeyStore.AddKey(keyInfo, privateKey) - if err != nil { - return err - } - - pemBytes, err := tempKeyStore.ExportKey(keyID) - if err != nil { - return err - } - nBytes, err := dest.Write(pemBytes) - if err != nil { - return err - } - if nBytes != len(pemBytes) { - return errors.New("Unable to finish writing exported key.") - } - return nil -} - -// ExportAllKeys exports all keys to an io.Writer in zip format. -// newPassphraseRetriever will be used to obtain passphrases to use to encrypt the existing keys. 
-func (cs *CryptoService) ExportAllKeys(dest io.Writer, newPassphraseRetriever notary.PassRetriever) error { - tempBaseDir, err := ioutil.TempDir("", "notary-key-export-") - defer os.RemoveAll(tempBaseDir) - - // Create temporary keystore to use as a staging area - tempKeyStore, err := trustmanager.NewKeyFileStore(tempBaseDir, newPassphraseRetriever) - if err != nil { - return err - } - - for _, ks := range cs.keyStores { - if err := moveKeys(ks, tempKeyStore); err != nil { - return err - } - } - - zipWriter := zip.NewWriter(dest) - - if err := addKeysToArchive(zipWriter, tempKeyStore); err != nil { - return err - } - - zipWriter.Close() - - return nil -} - -// ImportKeysZip imports keys from a zip file provided as an zip.Reader. The -// keys in the root_keys directory are left encrypted, but the other keys are -// decrypted with the specified passphrase. -func (cs *CryptoService) ImportKeysZip(zipReader zip.Reader, retriever notary.PassRetriever) error { - // Temporarily store the keys in maps, so we can bail early if there's - // an error (for example, wrong passphrase), without leaving the key - // store in an inconsistent state - newKeys := make(map[string][]byte) - - // Iterate through the files in the archive. 
Don't add the keys - for _, f := range zipReader.File { - fNameTrimmed := strings.TrimSuffix(f.Name, filepath.Ext(f.Name)) - rc, err := f.Open() - if err != nil { - return err - } - defer rc.Close() - - fileBytes, err := ioutil.ReadAll(rc) - if err != nil { - return nil - } - - // Note that using / as a separator is okay here - the zip - // package guarantees that the separator will be / - if fNameTrimmed[len(fNameTrimmed)-5:] == "_root" { - if err = CheckRootKeyIsEncrypted(fileBytes); err != nil { - return err - } - } - newKeys[fNameTrimmed] = fileBytes - } - - for keyName, pemBytes := range newKeys { - // Get the key role information as well as its data.PrivateKey representation - _, keyInfo, err := trustmanager.KeyInfoFromPEM(pemBytes, keyName) - if err != nil { - return err - } - privKey, err := trustmanager.ParsePEMPrivateKey(pemBytes, "") - if err != nil { - privKey, _, err = trustmanager.GetPasswdDecryptBytes(retriever, pemBytes, "", "imported "+keyInfo.Role) - if err != nil { - return err - } - } - // Add the key to our cryptoservice, will add to the first successful keystore - if err = cs.AddKey(keyInfo.Role, keyInfo.Gun, privKey); err != nil { - return err - } - } - - return nil -} - -// ExportKeysByGUN exports all keys associated with a specified GUN to an -// io.Writer in zip format. passphraseRetriever is used to select new passphrases to use to -// encrypt the keys. 
-func (cs *CryptoService) ExportKeysByGUN(dest io.Writer, gun string, passphraseRetriever notary.PassRetriever) error { - tempBaseDir, err := ioutil.TempDir("", "notary-key-export-") - defer os.RemoveAll(tempBaseDir) - - // Create temporary keystore to use as a staging area - tempKeyStore, err := trustmanager.NewKeyFileStore(tempBaseDir, passphraseRetriever) - if err != nil { - return err - } - - for _, ks := range cs.keyStores { - if err := moveKeysByGUN(ks, tempKeyStore, gun); err != nil { - return err - } - } - - zipWriter := zip.NewWriter(dest) - - if len(tempKeyStore.ListKeys()) == 0 { - return ErrNoKeysFoundForGUN - } - - if err := addKeysToArchive(zipWriter, tempKeyStore); err != nil { - return err - } - - zipWriter.Close() - - return nil -} - -func moveKeysByGUN(oldKeyStore, newKeyStore trustmanager.KeyStore, gun string) error { - for keyID, keyInfo := range oldKeyStore.ListKeys() { - // Skip keys that aren't associated with this GUN - if keyInfo.Gun != gun { - continue - } - - privKey, _, err := oldKeyStore.GetKey(keyID) - if err != nil { - return err - } - - err = newKeyStore.AddKey(keyInfo, privKey) - if err != nil { - return err - } - } - - return nil -} - -func moveKeys(oldKeyStore, newKeyStore trustmanager.KeyStore) error { - for keyID, keyInfo := range oldKeyStore.ListKeys() { - privateKey, _, err := oldKeyStore.GetKey(keyID) - if err != nil { - return err - } - - err = newKeyStore.AddKey(keyInfo, privateKey) - - if err != nil { - return err - } - } - - return nil -} - -func addKeysToArchive(zipWriter *zip.Writer, newKeyStore *trustmanager.KeyFileStore) error { - for _, relKeyPath := range newKeyStore.ListFiles() { - fullKeyPath, err := newKeyStore.GetPath(relKeyPath) - if err != nil { - return err - } - - fi, err := os.Lstat(fullKeyPath) - if err != nil { - return err - } - - infoHeader, err := zip.FileInfoHeader(fi) - if err != nil { - return err - } - - relPath, err := filepath.Rel(newKeyStore.BaseDir(), fullKeyPath) - if err != nil { - return err 
- } - infoHeader.Name = relPath - - zipFileEntryWriter, err := zipWriter.CreateHeader(infoHeader) - if err != nil { - return err - } - - fileContents, err := ioutil.ReadFile(fullKeyPath) - if err != nil { - return err - } - - if _, err = zipFileEntryWriter.Write(fileContents); err != nil { - return err - } - } - - return nil -} - -// CheckRootKeyIsEncrypted makes sure the root key is encrypted. We have -// internal assumptions that depend on this. -func CheckRootKeyIsEncrypted(pemBytes []byte) error { - block, _ := pem.Decode(pemBytes) - if block == nil { - return ErrNoValidPrivateKey - } - - if !x509.IsEncryptedPEMBlock(block) { - return ErrRootKeyNotEncrypted - } - - return nil -} diff --git a/cryptoservice/import_export_compatibility_test.go b/cryptoservice/import_export_compatibility_test.go deleted file mode 100644 index d398e4f8f..000000000 --- a/cryptoservice/import_export_compatibility_test.go +++ /dev/null @@ -1,164 +0,0 @@ -// Ensures we can import/export old-style repos - -package cryptoservice - -import ( - "archive/zip" - "io/ioutil" - "os" - "testing" - - "github.com/docker/notary" - "github.com/docker/notary/passphrase" - "github.com/docker/notary/trustmanager" - "github.com/docker/notary/tuf/data" - "github.com/stretchr/testify/require" -) - -// Zips up the keys in the old repo, and assert that we can import it and use -// said keys. 
The 0.1 exported format is just a zip file of all the keys -func TestImport0Dot1Zip(t *testing.T) { - ks, ret, _ := get0Dot1(t) - - zipFile, err := ioutil.TempFile("", "notary-test-zipFile") - defer os.RemoveAll(zipFile.Name()) - zipWriter := zip.NewWriter(zipFile) - require.NoError(t, err) - require.NoError(t, addKeysToArchive(zipWriter, ks)) - zipWriter.Close() - zipFile.Close() - - origKeys := make(map[string]string) - for keyID, keyInfo := range ks.ListKeys() { - origKeys[keyID] = keyInfo.Role - } - require.Len(t, origKeys, 3) - - // now import the zip file into a new cryptoservice - - tempDir, err := ioutil.TempDir("", "notary-test-import") - defer os.RemoveAll(tempDir) - require.NoError(t, err) - - ks, err = trustmanager.NewKeyFileStore(tempDir, ret) - require.NoError(t, err) - cs := NewCryptoService(ks) - - zipReader, err := zip.OpenReader(zipFile.Name()) - require.NoError(t, err) - defer zipReader.Close() - - require.NoError(t, cs.ImportKeysZip(zipReader.Reader, passphrase.ConstantRetriever("randompass"))) - assertHasKeys(t, cs, origKeys) -} - -func get0Dot1(t *testing.T) (*trustmanager.KeyFileStore, notary.PassRetriever, string) { - gun := "docker.com/notary0.1/samplerepo" - ret := passphrase.ConstantRetriever("randompass") - - // produce the zip file - ks, err := trustmanager.NewKeyFileStore("../fixtures/compatibility/notary0.1", ret) - require.NoError(t, err) - - return ks, ret, gun -} - -// Given a map of key IDs to roles, asserts that the cryptoService has all and -// only those keys -func assertHasKeys(t *testing.T, cs *CryptoService, expectedKeys map[string]string) { - keys := cs.ListAllKeys() - require.Len(t, keys, len(expectedKeys)) - - for keyID, role := range keys { - expectedRole, ok := expectedKeys[keyID] - require.True(t, ok) - require.Equal(t, expectedRole, role) - } -} - -// Export all the keys of a cryptoservice to a zipfile, and import it into a -// new cryptoService, and return that new cryptoService -func importExportedZip(t *testing.T, 
original *CryptoService, - ret notary.PassRetriever, gun string) (*CryptoService, string) { - - // Temporary directory where test files will be created - tempBaseDir, err := ioutil.TempDir("", "notary-test-") - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - ks, err := trustmanager.NewKeyFileStore(tempBaseDir, ret) - require.NoError(t, err) - var cs *CryptoService - - // export keys - zipFile, err := ioutil.TempFile("", "notary-test-zipFile") - defer os.RemoveAll(zipFile.Name()) - if gun != "" { - err = original.ExportKeysByGUN(zipFile, gun, ret) - require.NoError(t, err) - cs = NewCryptoService(ks) - } else { - err = original.ExportAllKeys(zipFile, ret) - require.NoError(t, err) - cs = NewCryptoService(ks) - } - zipFile.Close() - - // import keys into the cryptoservice now - zipReader, err := zip.OpenReader(zipFile.Name()) - require.NoError(t, err) - defer zipReader.Close() - - require.NoError(t, cs.ImportKeysZip(zipReader.Reader, passphrase.ConstantRetriever("randompass"))) - return cs, tempBaseDir -} - -func TestImportExport0Dot1AllKeys(t *testing.T) { - ks, ret, _ := get0Dot1(t) - cs := NewCryptoService(ks) - - newCS, tempDir := importExportedZip(t, cs, ret, "") - defer os.RemoveAll(tempDir) - - assertHasKeys(t, newCS, cs.ListAllKeys()) -} - -func TestImportExport0Dot1GUNKeys(t *testing.T) { - ks, ret, gun := get0Dot1(t) - - // remove root from expected key list, because root is not exported when - // we export by gun - expectedKeys := make(map[string]string) - for keyID, keyInfo := range ks.ListKeys() { - if keyInfo.Role != data.CanonicalRootRole { - expectedKeys[keyID] = keyInfo.Role - } - } - - // make some other temp directory to create new keys in - tempDir, err := ioutil.TempDir("", "notary-tests-keystore") - defer os.RemoveAll(tempDir) - require.NoError(t, err) - - otherKS, err := trustmanager.NewKeyFileStore(tempDir, ret) - require.NoError(t, err) - cs := NewCryptoService(otherKS, ks) - - // create a keys that is not of 
the same GUN, and be sure it's in this - // CryptoService - otherPubKey, err := cs.Create(data.CanonicalTargetsRole, "some/other/gun", data.ECDSAKey) - require.NoError(t, err) - - k, _, err := cs.GetPrivateKey(otherPubKey.ID()) - require.NoError(t, err) - require.NotNil(t, k) - - // export/import, and ensure that the other-gun key is not in the new - // CryptoService - newCS, tempDir := importExportedZip(t, cs, ret, gun) - defer os.RemoveAll(tempDir) - - assertHasKeys(t, newCS, expectedKeys) - - _, _, err = newCS.GetPrivateKey(otherPubKey.ID()) - require.Error(t, err) -} diff --git a/cryptoservice/import_export_test.go b/cryptoservice/import_export_test.go deleted file mode 100644 index d4777292d..000000000 --- a/cryptoservice/import_export_test.go +++ /dev/null @@ -1,491 +0,0 @@ -package cryptoservice - -import ( - "archive/zip" - "fmt" - "io/ioutil" - "net/http" - "net/http/httptest" - "os" - "path/filepath" - "testing" - - "github.com/docker/notary" - "github.com/docker/notary/trustmanager" - "github.com/docker/notary/tuf/data" - "github.com/stretchr/testify/require" -) - -const timestampECDSAKeyJSON = ` -{"keytype":"ecdsa","keyval":{"public":"MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEgl3rzMPMEKhS1k/AX16MM4PdidpjJr+z4pj0Td+30QnpbOIARgpyR1PiFztU8BZlqG3cUazvFclr2q/xHvfrqw==","private":"MHcCAQEEIDqtcdzU7H3AbIPSQaxHl9+xYECt7NpK7B1+6ep5cv9CoAoGCCqGSM49AwEHoUQDQgAEgl3rzMPMEKhS1k/AX16MM4PdidpjJr+z4pj0Td+30QnpbOIARgpyR1PiFztU8BZlqG3cUazvFclr2q/xHvfrqw=="}}` - -func createTestServer(t *testing.T) (*httptest.Server, *http.ServeMux) { - mux := http.NewServeMux() - // TUF will request /v2/docker.com/notary/_trust/tuf/timestamp.key - // Return a canned timestamp.key - mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/timestamp.key", func(w http.ResponseWriter, r *http.Request) { - // Also contains the private key, but for the purpose of this - // test, we don't care - fmt.Fprint(w, timestampECDSAKeyJSON) - }) - - ts := httptest.NewServer(mux) - - return ts, mux -} - 
-var oldPassphrase = "oldPassphrase" -var exportPassphrase = "exportPassphrase" -var oldPassphraseRetriever = func(string, string, bool, int) (string, bool, error) { return oldPassphrase, false, nil } -var newPassphraseRetriever = func(string, string, bool, int) (string, bool, error) { return exportPassphrase, false, nil } - -func TestImportExportZip(t *testing.T) { - gun := "docker.com/notary" - - // Temporary directory where test files will be created - tempBaseDir, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore, err := trustmanager.NewKeyFileStore(tempBaseDir, newPassphraseRetriever) - cs := NewCryptoService(fileStore) - pubKey, err := cs.Create(data.CanonicalRootRole, gun, data.ECDSAKey) - require.NoError(t, err) - - rootKeyID := pubKey.ID() - - tempZipFile, err := ioutil.TempFile("", "notary-test-export-") - tempZipFilePath := tempZipFile.Name() - defer os.Remove(tempZipFilePath) - - err = cs.ExportAllKeys(tempZipFile, newPassphraseRetriever) - tempZipFile.Close() - require.NoError(t, err) - - // Reopen the zip file for importing - zipReader, err := zip.OpenReader(tempZipFilePath) - require.NoError(t, err, "could not open zip file") - - // Map of files to expect in the zip file, with the passphrases - passphraseByFile := make(map[string]string) - - // Add non-root keys to the map. These should use the new passphrase - // because the passwords were chosen by the newPassphraseRetriever. - privKeyMap := cs.ListAllKeys() - for privKeyName := range privKeyMap { - _, alias, err := cs.GetPrivateKey(privKeyName) - require.NoError(t, err, "privKey %s has no alias", privKeyName) - - if alias == data.CanonicalRootRole { - continue - } - relKeyPath := filepath.Join(notary.NonRootKeysSubdir, privKeyName+".key") - passphraseByFile[relKeyPath] = exportPassphrase - } - - // Add root key to the map. 
This will use the export passphrase because it - // will be reencrypted. - relRootKey := filepath.Join(notary.RootKeysSubdir, rootKeyID+".key") - passphraseByFile[relRootKey] = exportPassphrase - - // Iterate through the files in the archive, checking that the files - // exist and are encrypted with the expected passphrase. - for _, f := range zipReader.File { - expectedPassphrase, present := passphraseByFile[f.Name] - require.True(t, present, "unexpected file %s in zip file", f.Name) - - delete(passphraseByFile, f.Name) - - rc, err := f.Open() - require.NoError(t, err, "could not open file inside zip archive") - - pemBytes, err := ioutil.ReadAll(rc) - require.NoError(t, err, "could not read file from zip") - - _, err = trustmanager.ParsePEMPrivateKey(pemBytes, expectedPassphrase) - require.NoError(t, err, "PEM not encrypted with the expected passphrase") - - rc.Close() - } - - zipReader.Close() - - // Are there any keys that didn't make it to the zip? - require.Len(t, passphraseByFile, 0) - - // Create new repo to test import - tempBaseDir2, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir2) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore2, err := trustmanager.NewKeyFileStore(tempBaseDir2, newPassphraseRetriever) - require.NoError(t, err) - cs2 := NewCryptoService(fileStore2) - - // Reopen the zip file for importing - zipReader, err = zip.OpenReader(tempZipFilePath) - require.NoError(t, err, "could not open zip file") - - // Now try with a valid passphrase. This time it should succeed. - err = cs2.ImportKeysZip(zipReader.Reader, newPassphraseRetriever) - require.NoError(t, err) - zipReader.Close() - - // Look for keys in private. The filenames should match the key IDs - // in the repo's private key store. 
- for privKeyName := range privKeyMap { - _, alias, err := cs2.GetPrivateKey(privKeyName) - require.NoError(t, err, "privKey %s has no alias", privKeyName) - - if alias == data.CanonicalRootRole { - continue - } - relKeyPath := filepath.Join(notary.NonRootKeysSubdir, privKeyName+".key") - privKeyFileName := filepath.Join(tempBaseDir2, notary.PrivDir, relKeyPath) - _, err = os.Stat(privKeyFileName) - require.NoError(t, err, "missing private key for role %s: %s", alias, privKeyName) - } - - // Look for keys in root_keys - // There should be a file named after the key ID of the root key we - // passed in. - rootKeyFilename := rootKeyID + ".key" - _, err = os.Stat(filepath.Join(tempBaseDir2, notary.PrivDir, notary.RootKeysSubdir, rootKeyFilename)) - require.NoError(t, err, "missing root key") -} - -func TestImportExportGUN(t *testing.T) { - gun := "docker.com/notary" - - // Temporary directory where test files will be created - tempBaseDir, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore, err := trustmanager.NewKeyFileStore(tempBaseDir, newPassphraseRetriever) - cs := NewCryptoService(fileStore) - _, err = cs.Create(data.CanonicalRootRole, gun, data.ECDSAKey) - _, err = cs.Create(data.CanonicalTargetsRole, gun, data.ECDSAKey) - _, err = cs.Create(data.CanonicalSnapshotRole, gun, data.ECDSAKey) - require.NoError(t, err) - - tempZipFile, err := ioutil.TempFile("", "notary-test-export-") - tempZipFilePath := tempZipFile.Name() - defer os.Remove(tempZipFilePath) - - err = cs.ExportKeysByGUN(tempZipFile, gun, newPassphraseRetriever) - require.NoError(t, err) - - // With an invalid GUN, this should return an error - err = cs.ExportKeysByGUN(tempZipFile, "does.not.exist/in/repository", newPassphraseRetriever) - require.EqualError(t, err, ErrNoKeysFoundForGUN.Error()) - - tempZipFile.Close() - - // Reopen the zip file for importing - zipReader, err := 
zip.OpenReader(tempZipFilePath) - require.NoError(t, err, "could not open zip file") - - // Map of files to expect in the zip file, with the passphrases - passphraseByFile := make(map[string]string) - - // Add keys non-root keys to the map. These should use the new passphrase - // because they were formerly unencrypted. - privKeyMap := cs.ListAllKeys() - for privKeyName := range privKeyMap { - _, alias, err := cs.GetPrivateKey(privKeyName) - require.NoError(t, err, "privKey %s has no alias", privKeyName) - if alias == data.CanonicalRootRole { - continue - } - relKeyPath := filepath.Join(notary.NonRootKeysSubdir, gun, privKeyName+".key") - - passphraseByFile[relKeyPath] = exportPassphrase - } - - // Iterate through the files in the archive, checking that the files - // exist and are encrypted with the expected passphrase. - for _, f := range zipReader.File { - - expectedPassphrase, present := passphraseByFile[f.Name] - require.True(t, present, "unexpected file %s in zip file", f.Name) - - delete(passphraseByFile, f.Name) - - rc, err := f.Open() - require.NoError(t, err, "could not open file inside zip archive") - - pemBytes, err := ioutil.ReadAll(rc) - require.NoError(t, err, "could not read file from zip") - - _, err = trustmanager.ParsePEMPrivateKey(pemBytes, expectedPassphrase) - require.NoError(t, err, "PEM not encrypted with the expected passphrase") - - rc.Close() - } - - zipReader.Close() - - // Are there any keys that didn't make it to the zip? 
- require.Len(t, passphraseByFile, 0) - - // Create new repo to test import - tempBaseDir2, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir2) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore2, err := trustmanager.NewKeyFileStore(tempBaseDir2, newPassphraseRetriever) - cs2 := NewCryptoService(fileStore2) - - // Reopen the zip file for importing - zipReader, err = zip.OpenReader(tempZipFilePath) - require.NoError(t, err, "could not open zip file") - - // Now try with a valid passphrase. This time it should succeed. - err = cs2.ImportKeysZip(zipReader.Reader, newPassphraseRetriever) - require.NoError(t, err) - zipReader.Close() - - // Look for keys in private. The filenames should match the key IDs - // in the repo's private key store. - for privKeyName, role := range privKeyMap { - if role == data.CanonicalRootRole { - continue - } - _, alias, err := cs2.GetPrivateKey(privKeyName) - require.NoError(t, err, "privKey %s has no alias", privKeyName) - if alias == data.CanonicalRootRole { - continue - } - relKeyPath := filepath.Join(notary.NonRootKeysSubdir, gun, privKeyName+".key") - privKeyFileName := filepath.Join(tempBaseDir2, notary.PrivDir, relKeyPath) - _, err = os.Stat(privKeyFileName) - require.NoError(t, err) - } -} - -func TestExportRootKey(t *testing.T) { - gun := "docker.com/notary" - - // Temporary directory where test files will be created - tempBaseDir, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore, err := trustmanager.NewKeyFileStore(tempBaseDir, oldPassphraseRetriever) - cs := NewCryptoService(fileStore) - pubKey, err := cs.Create(data.CanonicalRootRole, gun, data.ECDSAKey) - require.NoError(t, err) - - rootKeyID := pubKey.ID() - - tempKeyFile, err := ioutil.TempFile("", "notary-test-export-") - tempKeyFilePath := tempKeyFile.Name() - defer os.Remove(tempKeyFilePath) 
- - err = cs.ExportKey(tempKeyFile, rootKeyID, data.CanonicalRootRole) - require.NoError(t, err) - tempKeyFile.Close() - - // Create new repo to test import - tempBaseDir2, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir2) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore2, err := trustmanager.NewKeyFileStore(tempBaseDir2, oldPassphraseRetriever) - cs2 := NewCryptoService(fileStore2) - - keyReader, err := os.Open(tempKeyFilePath) - require.NoError(t, err, "could not open key file") - - pemImportBytes, err := ioutil.ReadAll(keyReader) - keyReader.Close() - require.NoError(t, err) - - // Convert to a data.PrivateKey, potentially decrypting the key, and add it to the cryptoservice - privKey, _, err := trustmanager.GetPasswdDecryptBytes(oldPassphraseRetriever, pemImportBytes, "", "imported "+data.CanonicalRootRole) - require.NoError(t, err) - err = cs2.AddKey(data.CanonicalRootRole, gun, privKey) - require.NoError(t, err) - - // Look for repo's root key in repo2 - // There should be a file named after the key ID of the root key we - // imported. 
- rootKeyFilename := rootKeyID + ".key" - _, err = os.Stat(filepath.Join(tempBaseDir2, notary.PrivDir, notary.RootKeysSubdir, rootKeyFilename)) - require.NoError(t, err, "missing root key") -} - -func TestExportRootKeyReencrypt(t *testing.T) { - gun := "docker.com/notary" - - // Temporary directory where test files will be created - tempBaseDir, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore, err := trustmanager.NewKeyFileStore(tempBaseDir, oldPassphraseRetriever) - cs := NewCryptoService(fileStore) - pubKey, err := cs.Create(data.CanonicalRootRole, gun, data.ECDSAKey) - require.NoError(t, err) - - rootKeyID := pubKey.ID() - - tempKeyFile, err := ioutil.TempFile("", "notary-test-export-") - tempKeyFilePath := tempKeyFile.Name() - defer os.Remove(tempKeyFilePath) - - err = cs.ExportKeyReencrypt(tempKeyFile, rootKeyID, newPassphraseRetriever) - require.NoError(t, err) - tempKeyFile.Close() - - // Create new repo to test import - tempBaseDir2, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir2) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore2, err := trustmanager.NewKeyFileStore(tempBaseDir2, newPassphraseRetriever) - cs2 := NewCryptoService(fileStore2) - - keyReader, err := os.Open(tempKeyFilePath) - require.NoError(t, err, "could not open key file") - - pemImportBytes, err := ioutil.ReadAll(keyReader) - keyReader.Close() - require.NoError(t, err) - - // Convert to a data.PrivateKey, potentially decrypting the key, and add it to the cryptoservice - privKey, _, err := trustmanager.GetPasswdDecryptBytes(newPassphraseRetriever, pemImportBytes, "", "imported "+data.CanonicalRootRole) - require.NoError(t, err) - err = cs2.AddKey(data.CanonicalRootRole, gun, privKey) - require.NoError(t, err) - - // Look for repo's root key in repo2 - // There should be a file named after the key ID of 
the root key we - // imported. - rootKeyFilename := rootKeyID + ".key" - _, err = os.Stat(filepath.Join(tempBaseDir2, notary.PrivDir, notary.RootKeysSubdir, rootKeyFilename)) - require.NoError(t, err, "missing root key") - - // Should be able to unlock the root key with the new password - key, alias, err := cs2.GetPrivateKey(rootKeyID) - require.NoError(t, err, "could not unlock root key") - require.Equal(t, data.CanonicalRootRole, alias) - require.Equal(t, rootKeyID, key.ID()) -} - -func TestExportNonRootKey(t *testing.T) { - gun := "docker.com/notary" - - // Temporary directory where test files will be created - tempBaseDir, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore, err := trustmanager.NewKeyFileStore(tempBaseDir, oldPassphraseRetriever) - cs := NewCryptoService(fileStore) - pubKey, err := cs.Create(data.CanonicalTargetsRole, gun, data.ECDSAKey) - require.NoError(t, err) - - targetsKeyID := pubKey.ID() - - tempKeyFile, err := ioutil.TempFile("", "notary-test-export-") - tempKeyFilePath := tempKeyFile.Name() - defer os.Remove(tempKeyFilePath) - - err = cs.ExportKey(tempKeyFile, targetsKeyID, data.CanonicalTargetsRole) - require.NoError(t, err) - tempKeyFile.Close() - - // Create new repo to test import - tempBaseDir2, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir2) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore2, err := trustmanager.NewKeyFileStore(tempBaseDir2, oldPassphraseRetriever) - cs2 := NewCryptoService(fileStore2) - - keyReader, err := os.Open(tempKeyFilePath) - require.NoError(t, err, "could not open key file") - - pemBytes, err := ioutil.ReadAll(keyReader) - require.NoError(t, err, "could not read key file") - - // Convert to a data.PrivateKey, potentially decrypting the key, and add it to the cryptoservice - privKey, _, err := 
trustmanager.GetPasswdDecryptBytes(oldPassphraseRetriever, pemBytes, "", "imported "+data.CanonicalTargetsRole) - require.NoError(t, err) - err = cs2.AddKey(data.CanonicalTargetsRole, gun, privKey) - require.NoError(t, err) - keyReader.Close() - - // Look for repo's targets key in repo2 - // There should be a file named after the key ID of the targets key we - // imported. - targetsKeyFilename := targetsKeyID + ".key" - _, err = os.Stat(filepath.Join(tempBaseDir2, notary.PrivDir, notary.NonRootKeysSubdir, "docker.com/notary", targetsKeyFilename)) - require.NoError(t, err, "missing targets key") - - // Check that the key is the same - key, alias, err := cs2.GetPrivateKey(targetsKeyID) - require.NoError(t, err, "could not unlock targets key") - require.Equal(t, data.CanonicalTargetsRole, alias) - require.Equal(t, targetsKeyID, key.ID()) -} - -func TestExportNonRootKeyReencrypt(t *testing.T) { - gun := "docker.com/notary" - - // Temporary directory where test files will be created - tempBaseDir, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore, err := trustmanager.NewKeyFileStore(tempBaseDir, oldPassphraseRetriever) - cs := NewCryptoService(fileStore) - pubKey, err := cs.Create(data.CanonicalSnapshotRole, gun, data.ECDSAKey) - require.NoError(t, err) - - snapshotKeyID := pubKey.ID() - - tempKeyFile, err := ioutil.TempFile("", "notary-test-export-") - tempKeyFilePath := tempKeyFile.Name() - defer os.Remove(tempKeyFilePath) - - err = cs.ExportKeyReencrypt(tempKeyFile, snapshotKeyID, newPassphraseRetriever) - require.NoError(t, err) - tempKeyFile.Close() - - // Create new repo to test import - tempBaseDir2, err := ioutil.TempDir("", "notary-test-") - defer os.RemoveAll(tempBaseDir2) - require.NoError(t, err, "failed to create a temporary directory: %s", err) - - fileStore2, err := trustmanager.NewKeyFileStore(tempBaseDir2, newPassphraseRetriever) - cs2 := 
NewCryptoService(fileStore2) - - keyReader, err := os.Open(tempKeyFilePath) - require.NoError(t, err, "could not open key file") - - pemBytes, err := ioutil.ReadAll(keyReader) - require.NoError(t, err, "could not read key file") - - // Convert to a data.PrivateKey, potentially decrypting the key, and add it to the cryptoservice - privKey, _, err := trustmanager.GetPasswdDecryptBytes(newPassphraseRetriever, pemBytes, "", "imported "+data.CanonicalSnapshotRole) - require.NoError(t, err) - err = cs2.AddKey(data.CanonicalSnapshotRole, gun, privKey) - require.NoError(t, err) - keyReader.Close() - - // Look for repo's snapshot key in repo2 - // There should be a file named after the key ID of the snapshot key we - // imported. - snapshotKeyFilename := snapshotKeyID + ".key" - _, err = os.Stat(filepath.Join(tempBaseDir2, notary.PrivDir, notary.NonRootKeysSubdir, "docker.com/notary", snapshotKeyFilename)) - require.NoError(t, err, "missing snapshot key") - - // Should be able to unlock the root key with the new password - key, alias, err := cs2.GetPrivateKey(snapshotKeyID) - require.NoError(t, err, "could not unlock snapshot key") - require.Equal(t, data.CanonicalSnapshotRole, alias) - require.Equal(t, snapshotKeyID, key.ID()) -} diff --git a/signer/keydbstore/keydbstore.go b/signer/keydbstore/keydbstore.go index 46ec822bf..5b519d088 100644 --- a/signer/keydbstore/keydbstore.go +++ b/signer/keydbstore/keydbstore.go @@ -1,7 +1,6 @@ package keydbstore import ( - "errors" "fmt" "sync" @@ -214,11 +213,6 @@ func (s *KeyDBStore) RotateKeyPassphrase(keyID, newPassphraseAlias string) error return nil } -// ExportKey is currently unimplemented and will always return an error -func (s *KeyDBStore) ExportKey(keyID string) ([]byte, error) { - return nil, errors.New("Exporting from a KeyDBStore is not supported.") -} - // HealthCheck verifies that DB exists and is query-able func (s *KeyDBStore) HealthCheck() error { dbPrivateKey := GormPrivateKey{} diff --git 
a/signer/keydbstore/rethink_keydbstore.go b/signer/keydbstore/rethink_keydbstore.go index 0ea12c346..bdbc37c05 100644 --- a/signer/keydbstore/rethink_keydbstore.go +++ b/signer/keydbstore/rethink_keydbstore.go @@ -2,7 +2,6 @@ package keydbstore import ( "encoding/json" - "errors" "fmt" "sync" "time" @@ -273,11 +272,6 @@ func (rdb RethinkDBKeyStore) RotateKeyPassphrase(name, newPassphraseAlias string return nil } -// ExportKey is currently unimplemented and will always return an error -func (rdb RethinkDBKeyStore) ExportKey(keyID string) ([]byte, error) { - return nil, errors.New("Exporting from a RethinkDBKeyStore is not supported.") -} - // Bootstrap sets up the database and tables, also creating the notary signer user with appropriate db permission func (rdb RethinkDBKeyStore) Bootstrap() error { if err := rethinkdb.SetupDB(rdb.sess, rdb.dbName, []rethinkdb.Table{ diff --git a/trustmanager/keyfilestore.go b/trustmanager/keyfilestore.go index afec8b22f..c82af3e86 100644 --- a/trustmanager/keyfilestore.go +++ b/trustmanager/keyfilestore.go @@ -191,18 +191,6 @@ func (s *KeyFileStore) RemoveKey(keyID string) error { return nil } -// ExportKey exports the encrypted bytes from the keystore -func (s *KeyFileStore) ExportKey(keyID string) ([]byte, error) { - if keyInfo, ok := s.keyInfoMap[keyID]; ok { - keyID = filepath.Join(keyInfo.Gun, keyID) - } - keyBytes, _, err := getRawKey(s, keyID) - if err != nil { - return nil, err - } - return keyBytes, nil -} - // NewKeyMemoryStore returns a new KeyMemoryStore which holds keys in memory func NewKeyMemoryStore(passphraseRetriever notary.PassRetriever) *KeyMemoryStore { memStore := NewMemoryFileStore() @@ -285,15 +273,6 @@ func (s *KeyMemoryStore) RemoveKey(keyID string) error { return nil } -// ExportKey exports the encrypted bytes from the keystore -func (s *KeyMemoryStore) ExportKey(keyID string) ([]byte, error) { - keyBytes, _, err := getRawKey(s, keyID) - if err != nil { - return nil, err - } - return keyBytes, nil -} - // 
KeyInfoFromPEM attempts to get a keyID and KeyInfo from the filename and PEM bytes of a key func KeyInfoFromPEM(pemBytes []byte, filename string) (string, KeyInfo, error) { keyID, role, gun := inferKeyInfoFromKeyPath(filename) diff --git a/trustmanager/keyfilestore_test.go b/trustmanager/keyfilestore_test.go index 5981d1016..82e4f6412 100644 --- a/trustmanager/keyfilestore_test.go +++ b/trustmanager/keyfilestore_test.go @@ -660,75 +660,3 @@ func TestKeysAreCached(t *testing.T) { } require.Equal(t, 2, numTimesCalled, "numTimesCalled should be 2 -- no additional call to passphraseRetriever") } - -// Exporting a key is successful (it is a valid key) -func TestKeyFileStoreExportSuccess(t *testing.T) { - // Generate a new Private Key - privKey, err := GenerateECDSAKey(rand.Reader) - require.NoError(t, err) - - // Temporary directory where test files will be created - tempBaseDir, err := ioutil.TempDir("", "notary-test-") - require.NoError(t, err) - defer os.RemoveAll(tempBaseDir) - - // Create our FileStore and add the key - store, err := NewKeyFileStore(tempBaseDir, passphraseRetriever) - require.NoError(t, err) - err = store.AddKey(KeyInfo{Role: data.CanonicalRootRole, Gun: ""}, privKey) - require.NoError(t, err) - - assertExportKeySuccess(t, store, privKey) -} - -// Exporting a key that doesn't exist fails (it is a valid key) -func TestKeyFileStoreExportNonExistantFailure(t *testing.T) { - // Temporary directory where test files will be created - tempBaseDir, err := ioutil.TempDir("", "notary-test-") - require.NoError(t, err) - defer os.RemoveAll(tempBaseDir) - - // Create empty FileStore - store, err := NewKeyFileStore(tempBaseDir, passphraseRetriever) - require.NoError(t, err) - - _, err = store.ExportKey("12345") - require.Error(t, err) -} - -// Exporting a key is successful (it is a valid key) -func TestKeyMemoryStoreExportSuccess(t *testing.T) { - // Generate a new Private Key - privKey, err := GenerateECDSAKey(rand.Reader) - require.NoError(t, err) - - // 
Create our MemoryStore and add key to it - store := NewKeyMemoryStore(passphraseRetriever) - require.NoError(t, err) - err = store.AddKey(KeyInfo{Role: data.CanonicalRootRole, Gun: ""}, privKey) - require.NoError(t, err) - - assertExportKeySuccess(t, store, privKey) -} - -// Exporting a key that doesn't exist fails (it is a valid key) -func TestKeyMemoryStoreExportNonExistantFailure(t *testing.T) { - store := NewKeyMemoryStore(passphraseRetriever) - _, err := store.ExportKey("12345") - require.Error(t, err) -} - -// Given a keystore and expected key that is in the store, export the key -// and assert that the exported key is the same and encrypted with the right -// password. -func assertExportKeySuccess( - t *testing.T, s KeyStore, expectedKey data.PrivateKey) { - - pemBytes, err := s.ExportKey(expectedKey.ID()) - require.NoError(t, err) - - reparsedKey, err := ParsePEMPrivateKey(pemBytes, cannedPassphrase) - require.NoError(t, err) - require.Equal(t, expectedKey.Private(), reparsedKey.Private()) - require.Equal(t, expectedKey.Public(), reparsedKey.Public()) -} diff --git a/trustmanager/keystore.go b/trustmanager/keystore.go index 237f570bd..e620aa453 100644 --- a/trustmanager/keystore.go +++ b/trustmanager/keystore.go @@ -49,7 +49,6 @@ type KeyStore interface { GetKeyInfo(keyID string) (KeyInfo, error) ListKeys() map[string]KeyInfo RemoveKey(keyID string) error - ExportKey(keyID string) ([]byte, error) Name() string } diff --git a/trustmanager/yubikey/yubikeystore.go b/trustmanager/yubikey/yubikeystore.go index a45772863..86b4e718a 100644 --- a/trustmanager/yubikey/yubikeystore.go +++ b/trustmanager/yubikey/yubikeystore.go @@ -789,12 +789,6 @@ func (s *YubiStore) RemoveKey(keyID string) error { return err } -// ExportKey doesn't work, because you can't export data from a Yubikey -func (s *YubiStore) ExportKey(keyID string) ([]byte, error) { - logrus.Debugf("Attempting to export: %s key inside of YubiStore", keyID) - return nil, errors.New("Keys cannot be exported 
from a Yubikey.") -} - // GetKeyInfo is not yet implemented func (s *YubiStore) GetKeyInfo(keyID string) (trustmanager.KeyInfo, error) { return trustmanager.KeyInfo{}, fmt.Errorf("Not yet implemented") diff --git a/trustmanager/yubikey/yubikeystore_test.go b/trustmanager/yubikey/yubikeystore_test.go index e4885fc19..e9b120a0a 100644 --- a/trustmanager/yubikey/yubikeystore_test.go +++ b/trustmanager/yubikey/yubikeystore_test.go @@ -358,29 +358,6 @@ func TestYubiRemoveKey(t *testing.T) { } } -// One cannot export from hardware - it will not export from the backup -func TestYubiExportKeyFails(t *testing.T) { - if !IsAccessible() { - t.Skip("Must have Yubikey access.") - } - clearAllKeys(t) - - SetYubikeyKeyMode(KeymodeNone) - defer func() { - SetYubikeyKeyMode(KeymodeTouch | KeymodePinOnce) - }() - - store, err := NewYubiStore(trustmanager.NewKeyMemoryStore(ret), ret) - require.NoError(t, err) - - key, err := testAddKey(t, store) - require.NoError(t, err) - - _, err = store.ExportKey(key.ID()) - require.Error(t, err) - require.Equal(t, "Keys cannot be exported from a Yubikey.", err.Error()) -} - // If there are keys in the backup store but no keys in the Yubikey, // listing and getting cannot access the keys in the backup store func TestYubiListAndGetKeysIgnoresBackup(t *testing.T) { @@ -682,8 +659,6 @@ func TestYubiListKeyCleansUpOnError(t *testing.T) { ), false) } -// export key fails anyway, don't bother testing - func TestYubiSignCleansUpOnError(t *testing.T) { if !IsAccessible() { t.Skip("Must have Yubikey access.") From 635b90e2663e5cd118644652f39dad69aef549cb Mon Sep 17 00:00:00 2001 From: David Lawrence Date: Tue, 28 Jun 2016 22:48:06 -0700 Subject: [PATCH 2/3] unify KeyMemoryStore and KeyFileStore in trustmanager Signed-off-by: David Lawrence (github: endophage) --- .gitignore | 2 + client/backwards_compatibility_test.go | 4 +- client/client.go | 20 +- client/client_test.go | 12 +- client/client_update_test.go | 32 +- client/delegations.go | 2 +- 
client/helpers.go | 4 +- cmd/notary/delegations.go | 3 +- cmd/notary/delegations_test.go | 14 +- cmd/notary/integration_test.go | 38 +- cmd/notary/keys_test.go | 15 +- cmd/notary/prettyprint_test.go | 7 +- cryptoservice/certificate.go | 4 +- cryptoservice/certificate_test.go | 3 +- cryptoservice/crypto_service.go | 7 +- cryptoservice/crypto_service_test.go | 11 +- server/handlers/default_test.go | 2 +- server/integration_test.go | 4 +- server/server_test.go | 14 +- signer/client/signer_trust_test.go | 3 +- signer/keydbstore/keydbstore_test.go | 13 +- {tuf/store => storage}/errors.go | 13 +- storage/filestore.go | 215 ++++++++ {trustmanager => storage}/filestore_test.go | 213 ++++++-- {tuf/store => storage}/httpstore.go | 25 +- {tuf/store => storage}/httpstore_test.go | 127 +---- storage/interface.go | 8 - {tuf/store => storage}/interfaces.go | 22 +- {tuf/store => storage}/memorystore.go | 75 +-- {tuf/store => storage}/memorystore_test.go | 30 +- {tuf/store => storage}/offlinestore.go | 32 +- {tuf/store => storage}/offlinestore_test.go | 12 +- {tuf/store => storage}/store_test.go | 20 +- trustmanager/filestore.go | 150 ------ trustmanager/interfaces.go | 86 ++++ trustmanager/keyfilestore.go | 476 ------------------ trustmanager/keystore.go | 373 ++++++++++++-- ...{keyfilestore_test.go => keystore_test.go} | 29 +- trustmanager/memorystore.go | 54 -- trustmanager/store.go | 42 -- trustmanager/yubikey/yubikeystore.go | 3 +- trustmanager/yubikey/yubikeystore_test.go | 7 +- trustpinning/certs.go | 12 +- trustpinning/certs_test.go | 17 +- trustpinning/trustpin.go | 7 +- tuf/client/client.go | 14 +- tuf/signed/ed25519.go | 3 +- tuf/signed/sign_test.go | 7 +- tuf/signed/verifiers_test.go | 33 ++ tuf/store/filestore.go | 102 ---- tuf/store/filestore_test.go | 138 ----- tuf/testutils/corrupt_memorystore.go | 20 +- tuf/testutils/interfaces/cryptoservice.go | 14 +- tuf/testutils/repo.go | 5 +- tuf/testutils/swizzler.go | 60 +-- tuf/testutils/swizzler_test.go | 52 +- 
tuf/tuf_test.go | 11 +- tuf/utils/util.go | 109 ---- tuf/utils/util_test.go | 91 ---- tuf/utils/utils.go | 35 ++ .../x509utils.go => tuf/utils/x509.go | 212 ++++---- .../utils/x509_test.go | 12 +- utils/configuration_test.go | 6 +- 63 files changed, 1384 insertions(+), 1802 deletions(-) rename {tuf/store => storage}/errors.go (59%) create mode 100644 storage/filestore.go rename {trustmanager => storage}/filestore_test.go (55%) rename {tuf/store => storage}/httpstore.go (92%) rename {tuf/store => storage}/httpstore_test.go (56%) delete mode 100644 storage/interface.go rename {tuf/store => storage}/interfaces.go (61%) rename {tuf/store => storage}/memorystore.go (53%) rename {tuf/store => storage}/memorystore_test.go (64%) rename {tuf/store => storage}/offlinestore.go (52%) rename {tuf/store => storage}/offlinestore_test.go (74%) rename {tuf/store => storage}/store_test.go (66%) delete mode 100644 trustmanager/filestore.go create mode 100644 trustmanager/interfaces.go delete mode 100644 trustmanager/keyfilestore.go rename trustmanager/{keyfilestore_test.go => keystore_test.go} (97%) delete mode 100644 trustmanager/memorystore.go delete mode 100644 trustmanager/store.go delete mode 100644 tuf/store/filestore.go delete mode 100644 tuf/store/filestore_test.go delete mode 100644 tuf/utils/util.go delete mode 100644 tuf/utils/util_test.go rename trustmanager/x509utils.go => tuf/utils/x509.go (97%) rename trustmanager/x509utils_test.go => tuf/utils/x509_test.go (94%) diff --git a/.gitignore b/.gitignore index 8439935e6..7cf4a771e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +/.vscode /cmd/notary-server/notary-server /cmd/notary-server/local.config.json /cmd/notary-signer/local.config.json @@ -8,4 +9,5 @@ cross *.swp .idea *.iml +*.test coverage.out diff --git a/client/backwards_compatibility_test.go b/client/backwards_compatibility_test.go index de10b1286..08e65e479 100644 --- a/client/backwards_compatibility_test.go +++ b/client/backwards_compatibility_test.go 
@@ -13,9 +13,9 @@ import ( "time" "github.com/docker/notary/passphrase" + store "github.com/docker/notary/storage" "github.com/docker/notary/trustpinning" "github.com/docker/notary/tuf/data" - "github.com/docker/notary/tuf/store" "github.com/stretchr/testify/require" ) @@ -101,7 +101,7 @@ func Test0Dot1RepoFormat(t *testing.T) { // delete the timestamp metadata, since the server will ignore the uploaded // one and try to create a new one from scratch, which will be the wrong version - require.NoError(t, repo.fileStore.RemoveMeta(data.CanonicalTimestampRole)) + require.NoError(t, repo.fileStore.Remove(data.CanonicalTimestampRole)) // rotate the timestamp key, since the server doesn't have that one err = repo.RotateKey(data.CanonicalTimestampRole, true) diff --git a/client/client.go b/client/client.go index 318610689..0245c4a56 100644 --- a/client/client.go +++ b/client/client.go @@ -16,13 +16,13 @@ import ( "github.com/docker/notary" "github.com/docker/notary/client/changelist" "github.com/docker/notary/cryptoservice" + store "github.com/docker/notary/storage" "github.com/docker/notary/trustmanager" "github.com/docker/notary/trustpinning" "github.com/docker/notary/tuf" tufclient "github.com/docker/notary/tuf/client" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" - "github.com/docker/notary/tuf/store" "github.com/docker/notary/tuf/utils" ) @@ -159,7 +159,7 @@ func rootCertKey(gun string, privKey data.PrivateKey) (data.PublicKey, error) { return nil, err } - x509PublicKey := trustmanager.CertToKey(cert) + x509PublicKey := utils.CertToKey(cert) if x509PublicKey == nil { return nil, fmt.Errorf( "cannot use regenerated certificate: format %s", cert.PublicKeyAlgorithm) @@ -676,7 +676,7 @@ func (r *NotaryRepository) publish(cl changelist.Changelist) error { return err } - return remote.SetMultiMeta(updatedFiles) + return remote.SetMulti(updatedFiles) } // bootstrapRepo loads the repository 
from the local file system (i.e. @@ -690,7 +690,7 @@ func (r *NotaryRepository) bootstrapRepo() error { logrus.Debugf("Loading trusted collection.") for _, role := range data.BaseRoles { - jsonBytes, err := r.fileStore.GetMeta(role, store.NoSizeLimit) + jsonBytes, err := r.fileStore.GetSized(role, store.NoSizeLimit) if err != nil { if _, ok := err.(store.ErrMetaNotFound); ok && // server snapshots are supported, and server timestamp management @@ -722,7 +722,7 @@ func (r *NotaryRepository) saveMetadata(ignoreSnapshot bool) error { if err != nil { return err } - err = r.fileStore.SetMeta(data.CanonicalRootRole, rootJSON) + err = r.fileStore.Set(data.CanonicalRootRole, rootJSON) if err != nil { return err } @@ -743,7 +743,7 @@ func (r *NotaryRepository) saveMetadata(ignoreSnapshot bool) error { for role, blob := range targetsToSave { parentDir := filepath.Dir(role) os.MkdirAll(parentDir, 0755) - r.fileStore.SetMeta(role, blob) + r.fileStore.Set(role, blob) } if ignoreSnapshot { @@ -755,7 +755,7 @@ func (r *NotaryRepository) saveMetadata(ignoreSnapshot bool) error { return err } - return r.fileStore.SetMeta(data.CanonicalSnapshotRole, snapshotJSON) + return r.fileStore.Set(data.CanonicalSnapshotRole, snapshotJSON) } // returns a properly constructed ErrRepositoryNotExist error based on this @@ -825,7 +825,7 @@ func (r *NotaryRepository) bootstrapClient(checkInitialized bool) (*tufclient.Cl // during update which will cause us to download a new root and perform a rotation. // If we have an old root, and it's valid, then we overwrite the newBuilder to be one // preloaded with the old root or one which uses the old root for trust bootstrapping. 
- if rootJSON, err := r.fileStore.GetMeta(data.CanonicalRootRole, store.NoSizeLimit); err == nil { + if rootJSON, err := r.fileStore.GetSized(data.CanonicalRootRole, store.NoSizeLimit); err == nil { // if we can't load the cached root, fail hard because that is how we pin trust if err := oldBuilder.Load(data.CanonicalRootRole, rootJSON, minVersion, true); err != nil { return nil, err @@ -852,7 +852,7 @@ func (r *NotaryRepository) bootstrapClient(checkInitialized bool) (*tufclient.Cl // if remote store successfully set up, try and get root from remote // We don't have any local data to determine the size of root, so try the maximum (though it is restricted at 100MB) - tmpJSON, err := remote.GetMeta(data.CanonicalRootRole, store.NoSizeLimit) + tmpJSON, err := remote.GetSized(data.CanonicalRootRole, store.NoSizeLimit) if err != nil { // we didn't have a root in cache and were unable to load one from // the server. Nothing we can do but error. @@ -865,7 +865,7 @@ func (r *NotaryRepository) bootstrapClient(checkInitialized bool) (*tufclient.Cl return nil, err } - err = r.fileStore.SetMeta(data.CanonicalRootRole, tmpJSON) + err = r.fileStore.Set(data.CanonicalRootRole, tmpJSON) if err != nil { // if we can't write cache we should still continue, just log error logrus.Errorf("could not save root to cache: %s", err.Error()) diff --git a/client/client_test.go b/client/client_test.go index e0d9a39a9..0734c95b5 100644 --- a/client/client_test.go +++ b/client/client_test.go @@ -29,11 +29,11 @@ import ( "github.com/docker/notary/passphrase" "github.com/docker/notary/server" "github.com/docker/notary/server/storage" + store "github.com/docker/notary/storage" "github.com/docker/notary/trustmanager" "github.com/docker/notary/trustpinning" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" - "github.com/docker/notary/tuf/store" "github.com/docker/notary/tuf/utils" "github.com/docker/notary/tuf/validation" ) @@ -102,7 +102,7 @@ 
func simpleTestServer(t *testing.T, roles ...string) ( mux := http.NewServeMux() for _, role := range roles { - key, err := trustmanager.GenerateECDSAKey(rand.Reader) + key, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) keys[role] = key @@ -1539,7 +1539,7 @@ func testValidateRootKey(t *testing.T, rootType string) { for _, keyid := range keyids { key, ok := decodedRoot.Keys[keyid] require.True(t, ok, "key id not found in keys") - _, err := trustmanager.LoadCertFromPEM(key.Public()) + _, err := utils.LoadCertFromPEM(key.Public()) require.NoError(t, err, "key is not a valid cert") } } @@ -1957,7 +1957,7 @@ func testPublishBadMetadata(t *testing.T, roleName string, repo *NotaryRepositor addTarget(t, repo, "v1", "../fixtures/intermediate-ca.crt") // readable, but corrupt file - repo.fileStore.SetMeta(roleName, []byte("this isn't JSON")) + repo.fileStore.Set(roleName, []byte("this isn't JSON")) err := repo.Publish() if succeeds { require.NoError(t, err) @@ -2054,7 +2054,7 @@ func createKey(t *testing.T, repo *NotaryRepository, role string, x509 bool) dat privKey, role, start, start.AddDate(1, 0, 0), ) require.NoError(t, err) - return data.NewECDSAx509PublicKey(trustmanager.CertToPEM(cert)) + return data.NewECDSAx509PublicKey(utils.CertToPEM(cert)) } return key } @@ -2170,7 +2170,7 @@ func testPublishTargetsDelegationScopeFailIfNoKeys(t *testing.T, clearCache bool // generate a key that isn't in the cryptoservice, so we can't sign this // one - aPrivKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + aPrivKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "error generating key that is not in our cryptoservice") aPubKey := data.PublicKeyFromPrivate(aPrivKey) diff --git a/client/client_update_test.go b/client/client_update_test.go index ef9c3c0ed..94c2d96f4 100644 --- a/client/client_update_test.go +++ b/client/client_update_test.go @@ -17,10 +17,10 @@ import ( "github.com/docker/go/canonical/json" 
"github.com/docker/notary" "github.com/docker/notary/passphrase" + store "github.com/docker/notary/storage" "github.com/docker/notary/trustpinning" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" - "github.com/docker/notary/tuf/store" "github.com/docker/notary/tuf/testutils" "github.com/gorilla/mux" "github.com/stretchr/testify/require" @@ -66,7 +66,7 @@ func readOnlyServer(t *testing.T, cache store.MetadataStore, notFoundStatus int, m := mux.NewRouter() handler := func(w http.ResponseWriter, r *http.Request) { vars := mux.Vars(r) - metaBytes, err := cache.GetMeta(vars["role"], store.NoSizeLimit) + metaBytes, err := cache.GetSized(vars["role"], store.NoSizeLimit) if _, ok := err.(store.ErrMetaNotFound); ok { w.WriteHeader(notFoundStatus) } else { @@ -84,11 +84,11 @@ type unwritableStore struct { roleToNotWrite string } -func (u *unwritableStore) SetMeta(role string, serverMeta []byte) error { +func (u *unwritableStore) Set(role string, serverMeta []byte) error { if role == u.roleToNotWrite { return fmt.Errorf("Non-writable") } - return u.MetadataStore.SetMeta(role, serverMeta) + return u.MetadataStore.Set(role, serverMeta) } // Update can succeed even if we cannot write any metadata to the repo (assuming @@ -111,7 +111,7 @@ func TestUpdateSucceedsEvenIfCannotWriteNewRepo(t *testing.T) { require.NoError(t, err) for r, expected := range serverMeta { - actual, err := repo.fileStore.GetMeta(r, store.NoSizeLimit) + actual, err := repo.fileStore.GetSized(r, store.NoSizeLimit) if r == role { require.Error(t, err) require.IsType(t, store.ErrMetaNotFound{}, err, @@ -158,7 +158,7 @@ func TestUpdateSucceedsEvenIfCannotWriteExistingRepo(t *testing.T) { require.NoError(t, err) for r, expected := range serverMeta { - actual, err := repo.fileStore.GetMeta(r, store.NoSizeLimit) + actual, err := repo.fileStore.GetSized(r, store.NoSizeLimit) require.NoError(t, err, "problem getting repo metadata for %s", r) if role == r { 
require.False(t, bytes.Equal(expected, actual), @@ -244,12 +244,12 @@ func TestUpdateReplacesCorruptOrMissingMetadata(t *testing.T) { require.Error(t, err, "%s for %s: expected to error when bootstrapping root", text, role) // revert our original metadata for role := range origMeta { - require.NoError(t, repo.fileStore.SetMeta(role, origMeta[role])) + require.NoError(t, repo.fileStore.Set(role, origMeta[role])) } } else { require.NoError(t, err) for r, expected := range serverMeta { - actual, err := repo.fileStore.GetMeta(r, store.NoSizeLimit) + actual, err := repo.fileStore.GetSized(r, store.NoSizeLimit) require.NoError(t, err, "problem getting repo metadata for %s", role) require.True(t, bytes.Equal(expected, actual), "%s for %s: expected to recover after update", text, role) @@ -298,7 +298,7 @@ func TestUpdateFailsIfServerRootKeyChangedWithoutMultiSign(t *testing.T) { text, messItUp := expt.desc, expt.swizzle for _, forWrite := range []bool{true, false} { require.NoError(t, messItUp(repoSwizzler, data.CanonicalRootRole), "could not fuzz root (%s)", text) - messedUpMeta, err := repo.fileStore.GetMeta(data.CanonicalRootRole, store.NoSizeLimit) + messedUpMeta, err := repo.fileStore.GetSized(data.CanonicalRootRole, store.NoSizeLimit) if _, ok := err.(store.ErrMetaNotFound); ok { // one of the ways to mess up is to delete metadata @@ -307,7 +307,7 @@ func TestUpdateFailsIfServerRootKeyChangedWithoutMultiSign(t *testing.T) { require.NoError(t, err) // revert our original metadata for role := range origMeta { - require.NoError(t, repo.fileStore.SetMeta(role, origMeta[role])) + require.NoError(t, repo.fileStore.Set(role, origMeta[role])) } } else { @@ -321,7 +321,7 @@ func TestUpdateFailsIfServerRootKeyChangedWithoutMultiSign(t *testing.T) { // same because it has failed to update. 
for role, expected := range origMeta { if role != data.CanonicalTimestampRole && role != data.CanonicalSnapshotRole { - actual, err := repo.fileStore.GetMeta(role, store.NoSizeLimit) + actual, err := repo.fileStore.GetSized(role, store.NoSizeLimit) require.NoError(t, err, "problem getting repo metadata for %s", role) if role == data.CanonicalRootRole { @@ -336,7 +336,7 @@ func TestUpdateFailsIfServerRootKeyChangedWithoutMultiSign(t *testing.T) { // revert our original root metadata require.NoError(t, - repo.fileStore.SetMeta(data.CanonicalRootRole, origMeta[data.CanonicalRootRole])) + repo.fileStore.Set(data.CanonicalRootRole, origMeta[data.CanonicalRootRole])) } } } @@ -967,7 +967,7 @@ func waysToMessUpServerNonRootPerRole(t *testing.T) map[string][]swizzleExpectat keyIDs = append(keyIDs, k) } // add the keys from root too - rootMeta, err := s.MetadataCache.GetMeta(data.CanonicalRootRole, store.NoSizeLimit) + rootMeta, err := s.MetadataCache.GetSized(data.CanonicalRootRole, store.NoSizeLimit) require.NoError(t, err) signedRoot := &data.SignedRoot{} @@ -1349,7 +1349,7 @@ func signSerializeAndUpdateRoot(t *testing.T, signedRoot data.SignedRoot, require.NoError(t, signed.Sign(serverSwizzler.CryptoService, signedObj, keys, len(keys), nil)) rootBytes, err := json.Marshal(signedObj) require.NoError(t, err) - require.NoError(t, serverSwizzler.MetadataCache.SetMeta(data.CanonicalRootRole, rootBytes)) + require.NoError(t, serverSwizzler.MetadataCache.Set(data.CanonicalRootRole, rootBytes)) // update the hashes on both snapshot and timestamp require.NoError(t, serverSwizzler.UpdateSnapshotHashes()) @@ -1374,7 +1374,7 @@ func TestValidateRootRotationWithOldRole(t *testing.T) { // --- key is saved, but doesn't matter at all for rotation if we're already on // --- the root metadata with the 3 keys) - rootBytes, err := serverSwizzler.MetadataCache.GetMeta(data.CanonicalRootRole, store.NoSizeLimit) + rootBytes, err := serverSwizzler.MetadataCache.GetSized(data.CanonicalRootRole, 
store.NoSizeLimit) require.NoError(t, err) signedRoot := data.SignedRoot{} require.NoError(t, json.Unmarshal(rootBytes, &signedRoot)) @@ -1626,7 +1626,7 @@ func TestRootOnDiskTrustPinning(t *testing.T) { defer os.RemoveAll(repo.baseDir) repo.trustPinning = restrictiveTrustPinning // put root on disk - require.NoError(t, repo.fileStore.SetMeta(data.CanonicalRootRole, meta[data.CanonicalRootRole])) + require.NoError(t, repo.fileStore.Set(data.CanonicalRootRole, meta[data.CanonicalRootRole])) require.NoError(t, repo.Update(false)) } diff --git a/client/delegations.go b/client/delegations.go index 5925466e2..5fbee5af2 100644 --- a/client/delegations.go +++ b/client/delegations.go @@ -8,8 +8,8 @@ import ( "github.com/Sirupsen/logrus" "github.com/docker/notary" "github.com/docker/notary/client/changelist" + store "github.com/docker/notary/storage" "github.com/docker/notary/tuf/data" - "github.com/docker/notary/tuf/store" "github.com/docker/notary/tuf/utils" ) diff --git a/client/helpers.go b/client/helpers.go index bcd3291ca..734fe6107 100644 --- a/client/helpers.go +++ b/client/helpers.go @@ -9,9 +9,9 @@ import ( "github.com/Sirupsen/logrus" "github.com/docker/notary/client/changelist" - tuf "github.com/docker/notary/tuf" + store "github.com/docker/notary/storage" + "github.com/docker/notary/tuf" "github.com/docker/notary/tuf/data" - "github.com/docker/notary/tuf/store" "github.com/docker/notary/tuf/utils" ) diff --git a/cmd/notary/delegations.go b/cmd/notary/delegations.go index 034bf94c0..4a3b02cd6 100644 --- a/cmd/notary/delegations.go +++ b/cmd/notary/delegations.go @@ -8,7 +8,6 @@ import ( "github.com/docker/notary" notaryclient "github.com/docker/notary/client" - "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/utils" "github.com/spf13/cobra" @@ -238,7 +237,7 @@ func (d *delegationCommander) delegationAdd(cmd *cobra.Command, args []string) e } // 
Parse PEM bytes into type PublicKey - pubKey, err := trustmanager.ParsePEMPublicKey(pubKeyBytes) + pubKey, err := utils.ParsePEMPublicKey(pubKeyBytes) if err != nil { return fmt.Errorf("unable to parse valid public key certificate from PEM file %s: %v", pubKeyPath, err) } diff --git a/cmd/notary/delegations_test.go b/cmd/notary/delegations_test.go index e80f5ca46..399d9dcfb 100644 --- a/cmd/notary/delegations_test.go +++ b/cmd/notary/delegations_test.go @@ -9,7 +9,7 @@ import ( "time" "github.com/docker/notary/cryptoservice" - "github.com/docker/notary/trustmanager" + "github.com/docker/notary/tuf/utils" "github.com/spf13/viper" "github.com/stretchr/testify/require" ) @@ -35,7 +35,7 @@ func TestAddInvalidDelegationName(t *testing.T) { tempFile, err := ioutil.TempFile("/tmp", "pemfile") require.NoError(t, err) cert, _, err := generateValidTestCert() - _, err = tempFile.Write(trustmanager.CertToPEM(cert)) + _, err = tempFile.Write(utils.CertToPEM(cert)) require.NoError(t, err) tempFile.Close() defer os.Remove(tempFile.Name()) @@ -56,7 +56,7 @@ func TestAddInvalidDelegationCert(t *testing.T) { tempFile, err := ioutil.TempFile("/tmp", "pemfile") require.NoError(t, err) cert, _, err := generateExpiredTestCert() - _, err = tempFile.Write(trustmanager.CertToPEM(cert)) + _, err = tempFile.Write(utils.CertToPEM(cert)) require.NoError(t, err) tempFile.Close() defer os.Remove(tempFile.Name()) @@ -77,7 +77,7 @@ func TestAddInvalidShortPubkeyCert(t *testing.T) { tempFile, err := ioutil.TempFile("/tmp", "pemfile") require.NoError(t, err) cert, _, err := generateShortRSAKeyTestCert() - _, err = tempFile.Write(trustmanager.CertToPEM(cert)) + _, err = tempFile.Write(utils.CertToPEM(cert)) require.NoError(t, err) tempFile.Close() defer os.Remove(tempFile.Name()) @@ -142,7 +142,7 @@ func TestRemoveInvalidNumArgs(t *testing.T) { } func generateValidTestCert() (*x509.Certificate, string, error) { - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err 
:= utils.GenerateECDSAKey(rand.Reader) if err != nil { return nil, "", err } @@ -157,7 +157,7 @@ func generateValidTestCert() (*x509.Certificate, string, error) { } func generateExpiredTestCert() (*x509.Certificate, string, error) { - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) if err != nil { return nil, "", err } @@ -174,7 +174,7 @@ func generateExpiredTestCert() (*x509.Certificate, string, error) { func generateShortRSAKeyTestCert() (*x509.Certificate, string, error) { // 1024 bits is too short - privKey, err := trustmanager.GenerateRSAKey(rand.Reader, 1024) + privKey, err := utils.GenerateRSAKey(rand.Reader, 1024) if err != nil { return nil, "", err } diff --git a/cmd/notary/integration_test.go b/cmd/notary/integration_test.go index 2c8841f3d..3b6761b84 100644 --- a/cmd/notary/integration_test.go +++ b/cmd/notary/integration_test.go @@ -100,9 +100,9 @@ func TestInitWithRootKey(t *testing.T) { // -- tests -- // create encrypted root key - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) - encryptedPEMPrivKey, err := trustmanager.EncryptPrivateKey(privKey, data.CanonicalRootRole, testPassphrase) + encryptedPEMPrivKey, err := utils.EncryptPrivateKey(privKey, data.CanonicalRootRole, testPassphrase) require.NoError(t, err) encryptedPEMKeyFilename := filepath.Join(tempDir, "encrypted_key.key") err = ioutil.WriteFile(encryptedPEMKeyFilename, encryptedPEMPrivKey, 0644) @@ -135,9 +135,9 @@ func TestInitWithRootKey(t *testing.T) { require.Error(t, err, "Init with non-PEM key should error") // check error if unencrypted PEM used - unencryptedPrivKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + unencryptedPrivKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) - unencryptedPEMPrivKey, err := trustmanager.KeyToPEM(unencryptedPrivKey, data.CanonicalRootRole) + unencryptedPEMPrivKey, err := 
utils.KeyToPEM(unencryptedPrivKey, data.CanonicalRootRole) require.NoError(t, err) unencryptedPEMKeyFilename := filepath.Join(tempDir, "unencrypted_key.key") err = ioutil.WriteFile(unencryptedPEMKeyFilename, unencryptedPEMPrivKey, 0644) @@ -148,9 +148,9 @@ func TestInitWithRootKey(t *testing.T) { // check error if invalid password used // instead of using a new retriever, we create a new key with a different pass - badPassPrivKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + badPassPrivKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) - badPassPEMPrivKey, err := trustmanager.EncryptPrivateKey(badPassPrivKey, data.CanonicalRootRole, "bad_pass") + badPassPEMPrivKey, err := utils.EncryptPrivateKey(badPassPrivKey, data.CanonicalRootRole, "bad_pass") require.NoError(t, err) badPassPEMKeyFilename := filepath.Join(tempDir, "badpass_key.key") err = ioutil.WriteFile(badPassPEMKeyFilename, badPassPEMPrivKey, 0644) @@ -160,9 +160,9 @@ func TestInitWithRootKey(t *testing.T) { require.Error(t, err, "Init with wrong password should error") // check error if wrong role specified - snapshotPrivKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + snapshotPrivKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) - snapshotPEMPrivKey, err := trustmanager.KeyToPEM(snapshotPrivKey, data.CanonicalSnapshotRole) + snapshotPEMPrivKey, err := utils.KeyToPEM(snapshotPrivKey, data.CanonicalSnapshotRole) require.NoError(t, err) snapshotPEMKeyFilename := filepath.Join(tempDir, "snapshot_key.key") err = ioutil.WriteFile(snapshotPEMKeyFilename, snapshotPEMPrivKey, 0644) @@ -406,19 +406,19 @@ func TestClientDelegationsInteraction(t *testing.T) { tempFile, err := ioutil.TempFile("", "pemfile") require.NoError(t, err) - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) startTime := time.Now() endTime := startTime.AddDate(10, 0, 0) cert, err := cryptoservice.GenerateCertificate(privKey, 
"gun", startTime, endTime) require.NoError(t, err) - _, err = tempFile.Write(trustmanager.CertToPEM(cert)) + _, err = tempFile.Write(utils.CertToPEM(cert)) require.NoError(t, err) tempFile.Close() defer os.Remove(tempFile.Name()) rawPubBytes, _ := ioutil.ReadFile(tempFile.Name()) - parsedPubKey, _ := trustmanager.ParsePEMPublicKey(rawPubBytes) + parsedPubKey, _ := utils.ParsePEMPublicKey(rawPubBytes) keyID, err := utils.CanonicalKeyID(parsedPubKey) require.NoError(t, err) @@ -494,20 +494,20 @@ func TestClientDelegationsInteraction(t *testing.T) { tempFile2, err := ioutil.TempFile("", "pemfile2") require.NoError(t, err) - privKey, err = trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err = utils.GenerateECDSAKey(rand.Reader) startTime = time.Now() endTime = startTime.AddDate(10, 0, 0) cert, err = cryptoservice.GenerateCertificate(privKey, "gun", startTime, endTime) require.NoError(t, err) - _, err = tempFile2.Write(trustmanager.CertToPEM(cert)) + _, err = tempFile2.Write(utils.CertToPEM(cert)) require.NoError(t, err) require.NoError(t, err) tempFile2.Close() defer os.Remove(tempFile2.Name()) rawPubBytes2, _ := ioutil.ReadFile(tempFile2.Name()) - parsedPubKey2, _ := trustmanager.ParsePEMPublicKey(rawPubBytes2) + parsedPubKey2, _ := utils.ParsePEMPublicKey(rawPubBytes2) keyID2, err := utils.CanonicalKeyID(parsedPubKey2) require.NoError(t, err) @@ -777,24 +777,24 @@ func TestClientDelegationsPublishing(t *testing.T) { tempFile, err := ioutil.TempFile("", "pemfile") require.NoError(t, err) - privKey, err := trustmanager.GenerateRSAKey(rand.Reader, 2048) + privKey, err := utils.GenerateRSAKey(rand.Reader, 2048) require.NoError(t, err) - privKeyBytesNoRole, err := trustmanager.KeyToPEM(privKey, "") + privKeyBytesNoRole, err := utils.KeyToPEM(privKey, "") require.NoError(t, err) - privKeyBytesWithRole, err := trustmanager.KeyToPEM(privKey, "user") + privKeyBytesWithRole, err := utils.KeyToPEM(privKey, "user") require.NoError(t, err) startTime := time.Now() endTime := 
startTime.AddDate(10, 0, 0) cert, err := cryptoservice.GenerateCertificate(privKey, "gun", startTime, endTime) require.NoError(t, err) - _, err = tempFile.Write(trustmanager.CertToPEM(cert)) + _, err = tempFile.Write(utils.CertToPEM(cert)) require.NoError(t, err) tempFile.Close() defer os.Remove(tempFile.Name()) rawPubBytes, _ := ioutil.ReadFile(tempFile.Name()) - parsedPubKey, _ := trustmanager.ParsePEMPublicKey(rawPubBytes) + parsedPubKey, _ := utils.ParsePEMPublicKey(rawPubBytes) canonicalKeyID, err := utils.CanonicalKeyID(parsedPubKey) require.NoError(t, err) diff --git a/cmd/notary/keys_test.go b/cmd/notary/keys_test.go index 44cf65dbc..cde7209fd 100644 --- a/cmd/notary/keys_test.go +++ b/cmd/notary/keys_test.go @@ -24,6 +24,7 @@ import ( "github.com/docker/notary/trustmanager" "github.com/docker/notary/trustpinning" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" "github.com/spf13/cobra" "github.com/spf13/viper" "github.com/stretchr/testify/require" @@ -50,7 +51,7 @@ func TestRemoveOneKeyAbort(t *testing.T) { nos := []string{"no", "NO", "AAAARGH", " N "} store := trustmanager.NewKeyMemoryStore(ret) - key, err := trustmanager.GenerateED25519Key(rand.Reader) + key, err := utils.GenerateED25519Key(rand.Reader) require.NoError(t, err) err = store.AddKey(trustmanager.KeyInfo{Role: data.CanonicalRootRole, Gun: ""}, key) require.NoError(t, err) @@ -82,7 +83,7 @@ func TestRemoveOneKeyConfirm(t *testing.T) { for _, yesAnswer := range yesses { store := trustmanager.NewKeyMemoryStore(ret) - key, err := trustmanager.GenerateED25519Key(rand.Reader) + key, err := utils.GenerateED25519Key(rand.Reader) require.NoError(t, err) err = store.AddKey(trustmanager.KeyInfo{Role: data.CanonicalRootRole, Gun: ""}, key) require.NoError(t, err) @@ -110,7 +111,7 @@ func TestRemoveMultikeysInvalidInput(t *testing.T) { setUp(t) in := bytes.NewBuffer([]byte("notanumber\n9999\n-3\n0")) - key, err := trustmanager.GenerateED25519Key(rand.Reader) 
+ key, err := utils.GenerateED25519Key(rand.Reader) require.NoError(t, err) stores := []trustmanager.KeyStore{ @@ -159,7 +160,7 @@ func TestRemoveMultikeysAbortChoice(t *testing.T) { setUp(t) in := bytes.NewBuffer([]byte("1\nn\n")) - key, err := trustmanager.GenerateED25519Key(rand.Reader) + key, err := utils.GenerateED25519Key(rand.Reader) require.NoError(t, err) stores := []trustmanager.KeyStore{ @@ -198,7 +199,7 @@ func TestRemoveMultikeysRemoveOnlyChosenKey(t *testing.T) { setUp(t) in := bytes.NewBuffer([]byte("1\ny\n")) - key, err := trustmanager.GenerateED25519Key(rand.Reader) + key, err := utils.GenerateED25519Key(rand.Reader) require.NoError(t, err) stores := []trustmanager.KeyStore{ @@ -535,11 +536,11 @@ func TestChangeKeyPassphraseNonexistentID(t *testing.T) { func generateTempTestKeyFile(t *testing.T, role string) string { setUp(t) - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) if err != nil { return "" } - keyBytes, err := trustmanager.KeyToPEM(privKey, role) + keyBytes, err := utils.KeyToPEM(privKey, role) require.NoError(t, err) tempPrivFile, err := ioutil.TempFile("/tmp", "privfile") diff --git a/cmd/notary/prettyprint_test.go b/cmd/notary/prettyprint_test.go index 4cde23891..9eec0c64c 100644 --- a/cmd/notary/prettyprint_test.go +++ b/cmd/notary/prettyprint_test.go @@ -15,6 +15,7 @@ import ( "github.com/docker/notary/passphrase" "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" "github.com/stretchr/testify/require" ) @@ -56,7 +57,7 @@ func TestKeyInfoSorter(t *testing.T) { } type otherMemoryStore struct { - trustmanager.KeyMemoryStore + trustmanager.GenericKeyStore } func (l *otherMemoryStore) Name() string { @@ -85,14 +86,14 @@ func TestPrettyPrintRootAndSigningKeys(t *testing.T) { ret := passphrase.ConstantRetriever("pass") keyStores := []trustmanager.KeyStore{ trustmanager.NewKeyMemoryStore(ret), - 
&otherMemoryStore{KeyMemoryStore: *trustmanager.NewKeyMemoryStore(ret)}, + &otherMemoryStore{GenericKeyStore: *trustmanager.NewKeyMemoryStore(ret)}, } longNameShortened := "..." + strings.Repeat("z", 37) keys := make([]data.PrivateKey, 4) for i := 0; i < 4; i++ { - key, err := trustmanager.GenerateED25519Key(rand.Reader) + key, err := utils.GenerateED25519Key(rand.Reader) require.NoError(t, err) keys[i] = key } diff --git a/cryptoservice/certificate.go b/cryptoservice/certificate.go index ff6f41b47..805a169af 100644 --- a/cryptoservice/certificate.go +++ b/cryptoservice/certificate.go @@ -7,8 +7,8 @@ import ( "fmt" "time" - "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" ) // GenerateCertificate generates an X509 Certificate from a template, given a GUN and validity interval @@ -22,7 +22,7 @@ func GenerateCertificate(rootKey data.PrivateKey, gun string, startTime, endTime } func generateCertificate(signer crypto.Signer, gun string, startTime, endTime time.Time) (*x509.Certificate, error) { - template, err := trustmanager.NewCertificate(gun, startTime, endTime) + template, err := utils.NewCertificate(gun, startTime, endTime) if err != nil { return nil, fmt.Errorf("failed to create the certificate template for: %s (%v)", gun, err) } diff --git a/cryptoservice/certificate_test.go b/cryptoservice/certificate_test.go index 00ef2653f..d7395bd02 100644 --- a/cryptoservice/certificate_test.go +++ b/cryptoservice/certificate_test.go @@ -8,11 +8,12 @@ import ( "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" "github.com/stretchr/testify/require" ) func TestGenerateCertificate(t *testing.T) { - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate key") keyStore := trustmanager.NewKeyMemoryStore(passphraseRetriever) diff 
--git a/cryptoservice/crypto_service.go b/cryptoservice/crypto_service.go index f86aee23c..92782309e 100644 --- a/cryptoservice/crypto_service.go +++ b/cryptoservice/crypto_service.go @@ -10,6 +10,7 @@ import ( "github.com/Sirupsen/logrus" "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" ) const ( @@ -44,17 +45,17 @@ func (cs *CryptoService) Create(role, gun, algorithm string) (data.PublicKey, er switch algorithm { case data.RSAKey: - privKey, err = trustmanager.GenerateRSAKey(rand.Reader, rsaKeySize) + privKey, err = utils.GenerateRSAKey(rand.Reader, rsaKeySize) if err != nil { return nil, fmt.Errorf("failed to generate RSA key: %v", err) } case data.ECDSAKey: - privKey, err = trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err = utils.GenerateECDSAKey(rand.Reader) if err != nil { return nil, fmt.Errorf("failed to generate EC key: %v", err) } case data.ED25519Key: - privKey, err = trustmanager.GenerateED25519Key(rand.Reader) + privKey, err = utils.GenerateED25519Key(rand.Reader) if err != nil { return nil, fmt.Errorf("failed to generate ED25519 key: %v", err) } diff --git a/cryptoservice/crypto_service_test.go b/cryptoservice/crypto_service_test.go index c0958b0ce..b8426a588 100644 --- a/cryptoservice/crypto_service_test.go +++ b/cryptoservice/crypto_service_test.go @@ -16,6 +16,7 @@ import ( "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" "github.com/docker/notary/tuf/testutils/interfaces" + "github.com/docker/notary/tuf/utils" ) var algoToSigType = map[string]data.SigAlgorithm{ @@ -130,7 +131,7 @@ func (c CryptoServiceTester) TestSignWithKey(t *testing.T) { func (c CryptoServiceTester) TestSignNoMatchingKeys(t *testing.T) { cryptoService := c.cryptoServiceFactory() - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, c.errorMsg("error creating key")) // Test 
Sign @@ -144,7 +145,7 @@ func (c CryptoServiceTester) TestGetPrivateKeyMultipleKeystores(t *testing.T) { cryptoService.keyStores = append(cryptoService.keyStores, trustmanager.NewKeyMemoryStore(passphraseRetriever)) - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, c.errorMsg("error creating key")) for _, store := range cryptoService.keyStores { @@ -234,7 +235,7 @@ func (c CryptoServiceTester) TestRemoveFromMultipleKeystores(t *testing.T) { cryptoService.keyStores = append(cryptoService.keyStores, trustmanager.NewKeyMemoryStore(passphraseRetriever)) - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, c.errorMsg("error creating key")) for _, store := range cryptoService.keyStores { @@ -264,7 +265,7 @@ func (c CryptoServiceTester) TestListFromMultipleKeystores(t *testing.T) { expectedKeysIDs := make(map[string]bool) // just want to be able to index by key for i := 0; i < 3; i++ { - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, c.errorMsg("error creating key")) expectedKeysIDs[privKey.ID()] = true @@ -308,7 +309,7 @@ func (c CryptoServiceTester) TestAddKey(t *testing.T) { cryptoService.keyStores = append(cryptoService.keyStores, trustmanager.NewKeyMemoryStore(passphraseRetriever)) - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) // Add the key to the targets role diff --git a/server/handlers/default_test.go b/server/handlers/default_test.go index 06824a64d..29646a737 100644 --- a/server/handlers/default_test.go +++ b/server/handlers/default_test.go @@ -16,9 +16,9 @@ import ( "github.com/docker/distribution/registry/api/errcode" "github.com/docker/notary/server/errors" "github.com/docker/notary/server/storage" 
+ store "github.com/docker/notary/storage" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" - "github.com/docker/notary/tuf/store" "github.com/docker/notary/tuf/validation" "github.com/docker/notary/tuf/testutils" diff --git a/server/integration_test.go b/server/integration_test.go index 29eb6f3ef..051030cde 100644 --- a/server/integration_test.go +++ b/server/integration_test.go @@ -9,9 +9,9 @@ import ( "testing" "github.com/docker/notary/server/storage" + store "github.com/docker/notary/storage" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" - "github.com/docker/notary/tuf/store" "github.com/docker/notary/tuf/testutils" "github.com/docker/notary/tuf/validation" "github.com/stretchr/testify/require" @@ -45,7 +45,7 @@ func TestValidationErrorFormat(t *testing.T) { // No snapshot is passed, and the server doesn't have the snapshot key, // so ErrBadHierarchy - err = client.SetMultiMeta(map[string][]byte{ + err = client.SetMulti(map[string][]byte{ data.CanonicalRootRole: rs, data.CanonicalTargetsRole: rt, }) diff --git a/server/server_test.go b/server/server_test.go index 0cb9deb14..ea117f399 100644 --- a/server/server_test.go +++ b/server/server_test.go @@ -16,9 +16,9 @@ import ( _ "github.com/docker/distribution/registry/auth/silly" "github.com/docker/notary" "github.com/docker/notary/server/storage" + store "github.com/docker/notary/storage" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" - "github.com/docker/notary/tuf/store" "github.com/docker/notary/tuf/testutils" tufutils "github.com/docker/notary/tuf/utils" "github.com/docker/notary/utils" @@ -77,12 +77,12 @@ func TestRepoPrefixMatches(t *testing.T) { require.NoError(t, err) // uploading is cool - require.NoError(t, uploader.SetMultiMeta(meta)) + require.NoError(t, uploader.SetMulti(meta)) // getting is cool - _, err = uploader.GetMeta(data.CanonicalSnapshotRole, 
notary.MaxDownloadSize) + _, err = uploader.GetSized(data.CanonicalSnapshotRole, notary.MaxDownloadSize) require.NoError(t, err) - _, err = uploader.GetMeta( + _, err = uploader.GetSized( tufutils.ConsistentName(data.CanonicalSnapshotRole, snChecksumBytes[:]), notary.MaxDownloadSize) require.NoError(t, err) @@ -117,7 +117,7 @@ func TestRepoPrefixDoesNotMatch(t *testing.T) { uploader, err := store.NewHTTPStore(url, "", "json", "key", http.DefaultTransport) require.NoError(t, err) - require.Error(t, uploader.SetMultiMeta(meta)) + require.Error(t, uploader.SetMulti(meta)) // update the storage so we don't fail just because the metadata is missing for _, roleName := range data.BaseRoles { @@ -128,10 +128,10 @@ func TestRepoPrefixDoesNotMatch(t *testing.T) { })) } - _, err = uploader.GetMeta(data.CanonicalSnapshotRole, notary.MaxDownloadSize) + _, err = uploader.GetSized(data.CanonicalSnapshotRole, notary.MaxDownloadSize) require.Error(t, err) - _, err = uploader.GetMeta( + _, err = uploader.GetSized( tufutils.ConsistentName(data.CanonicalSnapshotRole, snChecksumBytes[:]), notary.MaxDownloadSize) require.Error(t, err) diff --git a/signer/client/signer_trust_test.go b/signer/client/signer_trust_test.go index 45168a0b2..5f0193ff0 100644 --- a/signer/client/signer_trust_test.go +++ b/signer/client/signer_trust_test.go @@ -19,6 +19,7 @@ import ( "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" "github.com/docker/notary/tuf/testutils/interfaces" + "github.com/docker/notary/tuf/utils" "github.com/stretchr/testify/require" "golang.org/x/net/context" ) @@ -109,7 +110,7 @@ func TestHealthCheckConnectionDied(t *testing.T) { var ret = passphrase.ConstantRetriever("pass") func TestGetPrivateKeyAndSignWithExistingKey(t *testing.T) { - key, err := trustmanager.GenerateECDSAKey(rand.Reader) + key, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate key") store := trustmanager.NewKeyMemoryStore(ret) diff --git 
a/signer/keydbstore/keydbstore_test.go b/signer/keydbstore/keydbstore_test.go index c5eb3c099..ac6433fe4 100644 --- a/signer/keydbstore/keydbstore_test.go +++ b/signer/keydbstore/keydbstore_test.go @@ -10,6 +10,7 @@ import ( "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" "github.com/jinzhu/gorm" _ "github.com/mattn/go-sqlite3" "github.com/stretchr/testify/require" @@ -73,7 +74,7 @@ func TestNewKeyDBStorePropagatesDBError(t *testing.T) { // Creating a key, on succcess, populates the cache. func TestCreateSuccessPopulatesCache(t *testing.T) { - testKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + testKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) tmpFilename := initializeDB(t) @@ -92,7 +93,7 @@ func TestCreateSuccessPopulatesCache(t *testing.T) { // Getting a key, on succcess, populates the cache. func TestGetSuccessPopulatesCache(t *testing.T) { - testKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + testKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) tmpFilename := initializeDB(t) @@ -112,10 +113,10 @@ func TestGetSuccessPopulatesCache(t *testing.T) { } func TestDoubleCreate(t *testing.T) { - testKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + testKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) - anotherTestKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + anotherTestKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) tmpFilename := initializeDB(t) @@ -139,7 +140,7 @@ func TestDoubleCreate(t *testing.T) { } func TestCreateDelete(t *testing.T) { - testKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + testKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) tmpFilename := initializeDB(t) @@ -163,7 +164,7 @@ func TestCreateDelete(t *testing.T) { } func TestKeyRotation(t *testing.T) { - testKey, err := 
trustmanager.GenerateECDSAKey(rand.Reader) + testKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) tmpFilename := initializeDB(t) diff --git a/tuf/store/errors.go b/storage/errors.go similarity index 59% rename from tuf/store/errors.go rename to storage/errors.go index a7f63d6bb..2c7b87649 100644 --- a/tuf/store/errors.go +++ b/storage/errors.go @@ -1,6 +1,15 @@ -package store +package storage -import "fmt" +import ( + "errors" + "fmt" +) + +var ( + // ErrPathOutsideStore indicates that the returned path would be + // outside the store + ErrPathOutsideStore = errors.New("path outside file store") +) // ErrMetaNotFound indicates we did not find a particular piece // of metadata in the store diff --git a/storage/filestore.go b/storage/filestore.go new file mode 100644 index 000000000..b99079bcc --- /dev/null +++ b/storage/filestore.go @@ -0,0 +1,215 @@ +package storage + +import ( + "fmt" + "github.com/docker/notary" + "io" + "io/ioutil" + "os" + "path" + "path/filepath" + "strings" +) + +// NewFilesystemStore creates a new store in a directory tree +func NewFilesystemStore(baseDir, subDir, extension string) (*FilesystemStore, error) { + baseDir = path.Join(baseDir, subDir) + + return NewFileStore(baseDir, extension, notary.PrivKeyPerms) +} + +// NewFileStore creates a fully configurable file store +func NewFileStore(baseDir, fileExt string, perms os.FileMode) (*FilesystemStore, error) { + baseDir = filepath.Clean(baseDir) + if err := createDirectory(baseDir, perms); err != nil { + return nil, err + } + if !strings.HasPrefix(fileExt, ".") { + fileExt = "." 
+ fileExt + } + + return &FilesystemStore{ + baseDir: baseDir, + ext: fileExt, + perms: perms, + }, nil +} + +// NewSimpleFileStore is a convenience wrapper to create a world readable, +// owner writeable filestore +func NewSimpleFileStore(baseDir, fileExt string) (*FilesystemStore, error) { + return NewFileStore(baseDir, fileExt, notary.PubCertPerms) +} + +// NewPrivateSimpleFileStore is a wrapper to create an owner readable/writeable +// _only_ filestore +func NewPrivateSimpleFileStore(baseDir, fileExt string) (*FilesystemStore, error) { + return NewFileStore(baseDir, fileExt, notary.PrivKeyPerms) +} + +// FilesystemStore is a store in a locally accessible directory +type FilesystemStore struct { + baseDir string + ext string + perms os.FileMode +} + +func (f *FilesystemStore) getPath(name string) (string, error) { + fileName := fmt.Sprintf("%s%s", name, f.ext) + fullPath := filepath.Join(f.baseDir, fileName) + + if !strings.HasPrefix(fullPath, f.baseDir) { + return "", ErrPathOutsideStore + } + return fullPath, nil +} + +// GetSized returns the meta for the given name (a role) up to size bytes +// If size is "NoSizeLimit", this corresponds to "infinite," but we cut off at a +// predefined threshold "notary.MaxDownloadSize". If the file is larger than size +// we return ErrMaliciousServer for consistency with the HTTPStore +func (f *FilesystemStore) GetSized(name string, size int64) ([]byte, error) { + p, err := f.getPath(name) + if err != nil { + return nil, err + } + file, err := os.OpenFile(p, os.O_RDONLY, f.perms) + if err != nil { + if os.IsNotExist(err) { + err = ErrMetaNotFound{Resource: name} + } + return nil, err + } + defer file.Close() + + if size == NoSizeLimit { + size = notary.MaxDownloadSize + } + + stat, err := file.Stat() + if err != nil { + return nil, err + } + if stat.Size() > size { + return nil, ErrMaliciousServer{} + } + + l := io.LimitReader(file, size) + return ioutil.ReadAll(l) +} + +// Get returns the meta for the given name. 
+func (f *FilesystemStore) Get(name string) ([]byte, error) { + p, err := f.getPath(name) + if err != nil { + return nil, err + } + meta, err := ioutil.ReadFile(p) + if err != nil { + if os.IsNotExist(err) { + err = ErrMetaNotFound{Resource: name} + } + return nil, err + } + return meta, nil +} + +// SetMulti sets the metadata for multiple roles in one operation +func (f *FilesystemStore) SetMulti(metas map[string][]byte) error { + for role, blob := range metas { + err := f.Set(role, blob) + if err != nil { + return err + } + } + return nil +} + +// Set sets the meta for a single role +func (f *FilesystemStore) Set(name string, meta []byte) error { + fp, err := f.getPath(name) + if err != nil { + return err + } + + // Ensures the parent directories of the file we are about to write exist + err = os.MkdirAll(filepath.Dir(fp), f.perms) + if err != nil { + return err + } + + // if something already exists, just delete it and re-write it + os.RemoveAll(fp) + + // Write the file to disk + if err = ioutil.WriteFile(fp, meta, f.perms); err != nil { + return err + } + return nil +} + +// RemoveAll clears the existing filestore by removing its base directory +func (f *FilesystemStore) RemoveAll() error { + return os.RemoveAll(f.baseDir) +} + +// Remove removes the metadata for a single role - if the metadata doesn't +// exist, no error is returned +func (f *FilesystemStore) Remove(name string) error { + p, err := f.getPath(name) + if err != nil { + return err + } + return os.RemoveAll(p) // RemoveAll succeeds if path doesn't exist +} + +// Location returns a human readable name for the storage location +func (f FilesystemStore) Location() string { + return f.baseDir +} + +// ListFiles returns a list of all the filenames that can be used with Get* +// to retrieve content from this filestore +func (f FilesystemStore) ListFiles() []string { + files := make([]string, 0, 0) + filepath.Walk(f.baseDir, func(fp string, fi os.FileInfo, err error) error { + // If there are errors, 
ignore this particular file + if err != nil { + return nil + } + // Ignore if it is a directory + if fi.IsDir() { + return nil + } + + // If this is a symlink, ignore it + if fi.Mode()&os.ModeSymlink == os.ModeSymlink { + return nil + } + + // Only allow matches that end with our certificate extension (e.g. *.crt) + matched, _ := filepath.Match("*"+f.ext, fi.Name()) + + if matched { + // Find the relative path for this file relative to the base path. + fp, err = filepath.Rel(f.baseDir, fp) + if err != nil { + return err + } + trimmed := strings.TrimSuffix(fp, f.ext) + files = append(files, trimmed) + } + return nil + }) + return files +} + +// createDirectory receives a string of the path to a directory. +// It does not support passing files, so the caller has to remove +// the filename by doing filepath.Dir(full_path_to_file) +func createDirectory(dir string, perms os.FileMode) error { + // This prevents someone passing /path/to/dir and 'dir' not being created + // If two '//' exist, MkdirAll deals it with correctly + dir = dir + "/" + return os.MkdirAll(dir, perms) +} diff --git a/trustmanager/filestore_test.go b/storage/filestore_test.go similarity index 55% rename from trustmanager/filestore_test.go rename to storage/filestore_test.go index d9ae35c54..43f739a45 100644 --- a/trustmanager/filestore_test.go +++ b/storage/filestore_test.go @@ -1,16 +1,147 @@ -package trustmanager +package storage import ( - "crypto/rand" - "fmt" - "github.com/stretchr/testify/require" "io/ioutil" "os" + "path" "path/filepath" - "strconv" "testing" + + "crypto/rand" + "fmt" + "github.com/docker/notary" + "github.com/stretchr/testify/require" + "strconv" ) +const testDir = "/tmp/testFilesystemStore/" + +func TestNewFilesystemStore(t *testing.T) { + _, err := NewFilesystemStore(testDir, "metadata", "json") + require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) + defer os.RemoveAll(testDir) + + info, err := os.Stat(path.Join(testDir, 
"metadata")) + require.Nil(t, err, "Error attempting to stat metadata dir: %v", err) + require.NotNil(t, info, "Nil FileInfo from stat on metadata dir") + require.True(t, 0700&info.Mode() != 0, "Metadata directory is not writable") +} + +func TestSet(t *testing.T) { + s, err := NewFilesystemStore(testDir, "metadata", "json") + require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) + defer os.RemoveAll(testDir) + + testContent := []byte("test data") + + err = s.Set("testMeta", testContent) + require.Nil(t, err, "Set returned unexpected error: %v", err) + + content, err := ioutil.ReadFile(path.Join(testDir, "metadata", "testMeta.json")) + require.Nil(t, err, "Error reading file: %v", err) + require.Equal(t, testContent, content, "Content written to file was corrupted.") +} + +func TestSetWithNoParentDirectory(t *testing.T) { + s, err := NewFilesystemStore(testDir, "metadata", "json") + require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) + defer os.RemoveAll(testDir) + + testContent := []byte("test data") + + err = s.Set("noexist/"+"testMeta", testContent) + require.Nil(t, err, "Set returned unexpected error: %v", err) + + content, err := ioutil.ReadFile(path.Join(testDir, "metadata", "noexist/testMeta.json")) + require.Nil(t, err, "Error reading file: %v", err) + require.Equal(t, testContent, content, "Content written to file was corrupted.") +} + +// if something already existed there, remove it first and write a new file +func TestSetRemovesExistingFileBeforeWriting(t *testing.T) { + s, err := NewFilesystemStore(testDir, "metadata", "json") + require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) + defer os.RemoveAll(testDir) + + // make a directory where we want metadata to go + os.Mkdir(filepath.Join(testDir, "metadata", "root.json"), 0700) + + testContent := []byte("test data") + err = s.Set("root", testContent) + require.NoError(t, err, "Set returned unexpected 
error: %v", err) + + content, err := ioutil.ReadFile(path.Join(testDir, "metadata", "root.json")) + require.NoError(t, err, "Error reading file: %v", err) + require.Equal(t, testContent, content, "Content written to file was corrupted.") +} + +func TestGetSized(t *testing.T) { + s, err := NewFilesystemStore(testDir, "metadata", "json") + require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) + defer os.RemoveAll(testDir) + + testContent := []byte("test data") + + ioutil.WriteFile(path.Join(testDir, "metadata", "testMeta.json"), testContent, 0600) + + content, err := s.GetSized("testMeta", int64(len(testContent))) + require.Nil(t, err, "GetSized returned unexpected error: %v", err) + + require.Equal(t, testContent, content, "Content read from file was corrupted.") + + // Check that NoSizeLimit size reads everything + content, err = s.GetSized("testMeta", NoSizeLimit) + require.Nil(t, err, "GetSized returned unexpected error: %v", err) + + require.Equal(t, testContent, content, "Content read from file was corrupted.") + + // Check that we error if the file is larger than the expected size + content, err = s.GetSized("testMeta", 4) + require.Error(t, err) + require.Len(t, content, 0) +} + +func TestGetSizedSet(t *testing.T) { + s, err := NewFilesystemStore(testDir, "metadata", "json") + require.NoError(t, err, "Initializing FilesystemStore returned unexpected error", err) + defer os.RemoveAll(testDir) + + testGetSetMeta(t, func() MetadataStore { return s }) +} + +func TestRemove(t *testing.T) { + s, err := NewFilesystemStore(testDir, "metadata", "json") + require.NoError(t, err, "Initializing FilesystemStore returned unexpected error", err) + defer os.RemoveAll(testDir) + + testRemove(t, func() MetadataStore { return s }) +} + +func TestRemoveAll(t *testing.T) { + s, err := NewFilesystemStore(testDir, "metadata", "json") + require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) + defer 
os.RemoveAll(testDir) + + testContent := []byte("test data") + + // Write some files in metadata and targets dirs + metaPath := path.Join(testDir, "metadata", "testMeta.json") + ioutil.WriteFile(metaPath, testContent, 0600) + + // Remove all + err = s.RemoveAll() + require.Nil(t, err, "Removing all from FilesystemStore returned unexpected error: %v", err) + + // Test that files no longer exist + _, err = ioutil.ReadFile(metaPath) + require.True(t, os.IsNotExist(err)) + + // Removing the empty filestore returns nil + require.Nil(t, s.RemoveAll()) +} + +// Tests originally from Trustmanager ensuring the FilesystemStore satisfies the +// necessary behaviour func TestAddFile(t *testing.T) { testData := []byte("This test data should be part of the file.") testName := "docker.com/notary/certificate" @@ -25,15 +156,15 @@ func TestAddFile(t *testing.T) { // Since we're generating this manually we need to add the extension '.' expectedFilePath := filepath.Join(tempBaseDir, testName+testExt) - // Create our SimpleFileStore - store := &SimpleFileStore{ + // Create our FilesystemStore + store := &FilesystemStore{ baseDir: tempBaseDir, - fileExt: testExt, + ext: testExt, perms: perms, } - // Call the Add function - err = store.Add(testName, testData) + // Call the Set function + err = store.Set(testName, testData) require.NoError(t, err) // Check to see if file exists @@ -58,10 +189,10 @@ func TestRemoveFile(t *testing.T) { _, err = generateRandomFile(expectedFilePath, perms) require.NoError(t, err) - // Create our SimpleFileStore - store := &SimpleFileStore{ + // Create our FilesystemStore + store := &FilesystemStore{ baseDir: tempBaseDir, - fileExt: testExt, + ext: testExt, perms: perms, } @@ -94,10 +225,10 @@ func TestListFiles(t *testing.T) { require.NoError(t, err) } - // Create our SimpleFileStore - store := &SimpleFileStore{ + // Create our FilesystemStore + store := &FilesystemStore{ baseDir: tempBaseDir, - fileExt: testExt, + ext: testExt, perms: perms, } @@ -110,20 
+241,20 @@ func TestGetPath(t *testing.T) { testExt := ".crt" perms := os.FileMode(0755) - // Create our SimpleFileStore - store := &SimpleFileStore{ + // Create our FilesystemStore + store := &FilesystemStore{ baseDir: "", - fileExt: testExt, + ext: testExt, perms: perms, } firstPath := "diogomonica.com/openvpn/0xdeadbeef.crt" secondPath := "/docker.io/testing-dashes/@#$%^&().crt" - result, err := store.GetPath("diogomonica.com/openvpn/0xdeadbeef") + result, err := store.getPath("diogomonica.com/openvpn/0xdeadbeef") require.Equal(t, firstPath, result, "unexpected error from GetPath: %v", err) - result, err = store.GetPath("/docker.io/testing-dashes/@#$%^&()") + result, err = store.getPath("/docker.io/testing-dashes/@#$%^&()") require.Equal(t, secondPath, result, "unexpected error from GetPath: %v", err) } @@ -131,46 +262,46 @@ func TestGetPathProtection(t *testing.T) { testExt := ".crt" perms := os.FileMode(0755) - // Create our SimpleFileStore - store := &SimpleFileStore{ + // Create our FilesystemStore + store := &FilesystemStore{ baseDir: "/path/to/filestore/", - fileExt: testExt, + ext: testExt, perms: perms, } // Should deny requests for paths outside the filestore - _, err := store.GetPath("../../etc/passwd") + _, err := store.getPath("../../etc/passwd") require.Error(t, err) require.Equal(t, ErrPathOutsideStore, err) - _, err = store.GetPath("private/../../../etc/passwd") + _, err = store.getPath("private/../../../etc/passwd") require.Error(t, err) require.Equal(t, ErrPathOutsideStore, err) // Convoluted paths should work as long as they end up inside the store expected := "/path/to/filestore/filename.crt" - result, err := store.GetPath("private/../../filestore/./filename") + result, err := store.getPath("private/../../filestore/./filename") require.NoError(t, err) require.Equal(t, expected, result) // Repeat tests with a relative baseDir - relStore := &SimpleFileStore{ + relStore := &FilesystemStore{ baseDir: "relative/file/path", - fileExt: testExt, + 
ext: testExt, perms: perms, } // Should deny requests for paths outside the filestore - _, err = relStore.GetPath("../../etc/passwd") + _, err = relStore.getPath("../../etc/passwd") require.Error(t, err) require.Equal(t, ErrPathOutsideStore, err) - _, err = relStore.GetPath("private/../../../etc/passwd") + _, err = relStore.getPath("private/../../../etc/passwd") require.Error(t, err) require.Equal(t, ErrPathOutsideStore, err) // Convoluted paths should work as long as they end up inside the store expected = "relative/file/path/filename.crt" - result, err = relStore.GetPath("private/../../path/./filename") + result, err = relStore.getPath("private/../../path/./filename") require.NoError(t, err) require.Equal(t, expected, result) } @@ -191,10 +322,10 @@ func TestGetData(t *testing.T) { expectedData, err := generateRandomFile(expectedFilePath, perms) require.NoError(t, err) - // Create our SimpleFileStore - store := &SimpleFileStore{ + // Create our FilesystemStore + store := &FilesystemStore{ baseDir: tempBaseDir, - fileExt: testExt, + ext: testExt, perms: perms, } testData, err := store.Get(testName) @@ -213,7 +344,7 @@ func TestCreateDirectory(t *testing.T) { dirPath := filepath.Join(tempBaseDir, testDir) // Call createDirectory - createDirectory(dirPath, visible) + createDirectory(dirPath, notary.PubCertPerms) // Check to see if file exists fi, err := os.Stat(dirPath) @@ -237,7 +368,7 @@ func TestCreatePrivateDirectory(t *testing.T) { dirPath := filepath.Join(tempBaseDir, testDir) // Call createDirectory - createDirectory(dirPath, private) + createDirectory(dirPath, notary.PrivKeyPerms) // Check to see if file exists fi, err := os.Stat(dirPath) @@ -297,10 +428,10 @@ func TestFileStoreConsistency(t *testing.T) { file2Path := "path/file2" file3Path := "long/path/file3" - for _, s := range []Storage{s, s2} { - s.Add(file1Path, file1Data) - s.Add(file2Path, file2Data) - s.Add(file3Path, file3Data) + for _, s := range []*FilesystemStore{s, s2} { + s.Set(file1Path, 
file1Data) + s.Set(file2Path, file2Data) + s.Set(file3Path, file3Data) paths := map[string][]byte{ file1Path: file1Data, diff --git a/tuf/store/httpstore.go b/storage/httpstore.go similarity index 92% rename from tuf/store/httpstore.go rename to storage/httpstore.go index 3fbddfc03..1a0d9c583 100644 --- a/tuf/store/httpstore.go +++ b/storage/httpstore.go @@ -8,7 +8,7 @@ // If writing your own server, please have a look at // github.com/docker/distribution/registry/api/errcode -package store +package storage import ( "bytes" @@ -136,12 +136,12 @@ func translateStatusToError(resp *http.Response, resource string) error { } } -// GetMeta downloads the named meta file with the given size. A short body +// GetSized downloads the named meta file with the given size. A short body // is acceptable because in the case of timestamp.json, the size is a cap, // not an exact length. // If size is "NoSizeLimit", this corresponds to "infinite," but we cut off at a // predefined threshold "notary.MaxDownloadSize". 
-func (s HTTPStore) GetMeta(name string, size int64) ([]byte, error) { +func (s HTTPStore) GetSized(name string, size int64) ([]byte, error) { url, err := s.buildMetaURL(name) if err != nil { return nil, err @@ -174,8 +174,8 @@ func (s HTTPStore) GetMeta(name string, size int64) ([]byte, error) { return body, nil } -// SetMeta uploads a piece of TUF metadata to the server -func (s HTTPStore) SetMeta(name string, blob []byte) error { +// Set uploads a piece of TUF metadata to the server +func (s HTTPStore) Set(name string, blob []byte) error { url, err := s.buildMetaURL("") if err != nil { return err @@ -192,9 +192,9 @@ func (s HTTPStore) SetMeta(name string, blob []byte) error { return translateStatusToError(resp, "POST "+name) } -// RemoveMeta always fails, because we should never be able to delete metadata -// for individual TUF metadata remotely -func (s HTTPStore) RemoveMeta(name string) error { +// Remove always fails, because we should never be able to delete metadata +// remotely +func (s HTTPStore) Remove(name string) error { return ErrInvalidOperation{msg: "cannot delete individual metadata files"} } @@ -222,10 +222,10 @@ func NewMultiPartMetaRequest(url string, metas map[string][]byte) (*http.Request return req, nil } -// SetMultiMeta does a single batch upload of multiple pieces of TUF metadata. +// SetMulti does a single batch upload of multiple pieces of TUF metadata. // This should be preferred for updating a remote server as it enable the server // to remain consistent, either accepting or rejecting the complete update. 
-func (s HTTPStore) SetMultiMeta(metas map[string][]byte) error { +func (s HTTPStore) SetMulti(metas map[string][]byte) error { url, err := s.buildMetaURL("") if err != nil { return err @@ -308,3 +308,8 @@ func (s HTTPStore) GetKey(role string) ([]byte, error) { } return body, nil } + +// Location returns a human readable name for the storage location +func (s HTTPStore) Location() string { + return s.baseURL.String() +} diff --git a/tuf/store/httpstore_test.go b/storage/httpstore_test.go similarity index 56% rename from tuf/store/httpstore_test.go rename to storage/httpstore_test.go index 1a3d58a28..3af684cff 100644 --- a/tuf/store/httpstore_test.go +++ b/storage/httpstore_test.go @@ -1,9 +1,7 @@ -package store +package storage import ( "bytes" - "encoding/base64" - "encoding/hex" "fmt" "io" "io/ioutil" @@ -14,7 +12,6 @@ import ( "github.com/docker/go/canonical/json" "github.com/docker/notary/tuf/data" - "github.com/docker/notary/tuf/signed" "github.com/docker/notary/tuf/validation" "github.com/stretchr/testify/require" ) @@ -29,7 +26,7 @@ func (rt *TestRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) return http.DefaultClient.Do(req) } -func TestHTTPStoreGetMeta(t *testing.T) { +func TestHTTPStoreGetSized(t *testing.T) { handler := func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(testRoot)) } @@ -42,40 +39,13 @@ func TestHTTPStoreGetMeta(t *testing.T) { "key", &http.Transport{}, ) - if err != nil { - t.Fatal(err) - } - j, err := store.GetMeta("root", 4801) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) + j, err := store.GetSized("root", 4801) + require.NoError(t, err) + require.Equal(t, testRoot, string(j)) p := &data.Signed{} err = json.Unmarshal(j, p) - if err != nil { - t.Fatal(err) - } - rootKey, err := base64.StdEncoding.DecodeString(testRootKey) require.NoError(t, err) - k := data.NewPublicKey("ecdsa-x509", rootKey) - - sigBytes := p.Signatures[0].Signature - if err != nil { - t.Fatal(err) - } - 
var decoded map[string]interface{} - if err := json.Unmarshal(*p.Signed, &decoded); err != nil { - t.Fatal(err) - } - msg, err := json.MarshalCanonical(decoded) - if err != nil { - t.Fatal(err) - } - method := p.Signatures[0].Method - err = signed.Verifiers[method].Verify(k, sigBytes, msg) - if err != nil { - t.Fatal(err) - } - } // Test that passing -1 to httpstore's GetMeta will return all content @@ -92,39 +62,13 @@ func TestHTTPStoreGetAllMeta(t *testing.T) { "key", &http.Transport{}, ) - if err != nil { - t.Fatal(err) - } - j, err := store.GetMeta("root", NoSizeLimit) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) + j, err := store.GetSized("root", NoSizeLimit) + require.NoError(t, err) + require.Equal(t, testRoot, string(j)) p := &data.Signed{} err = json.Unmarshal(j, p) - if err != nil { - t.Fatal(err) - } - rootKey, err := base64.StdEncoding.DecodeString(testRootKey) require.NoError(t, err) - k := data.NewPublicKey("ecdsa-x509", rootKey) - - sigBytes := p.Signatures[0].Signature - if err != nil { - t.Fatal(err) - } - var decoded map[string]interface{} - if err := json.Unmarshal(*p.Signed, &decoded); err != nil { - t.Fatal(err) - } - msg, err := json.MarshalCanonical(decoded) - if err != nil { - t.Fatal(err) - } - method := p.Signatures[0].Method - err = signed.Verifiers[method].Verify(k, sigBytes, msg) - if err != nil { - t.Fatal(err) - } } func TestSetMultiMeta(t *testing.T) { @@ -135,9 +79,7 @@ func TestSetMultiMeta(t *testing.T) { handler := func(w http.ResponseWriter, r *http.Request) { reader, err := r.MultipartReader() - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) updates := make(map[string][]byte) for { part, err := reader.NextPart() @@ -146,9 +88,7 @@ func TestSetMultiMeta(t *testing.T) { } role := strings.TrimSuffix(part.FileName(), ".json") updates[role], err = ioutil.ReadAll(part) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) } rd, rok := updates["root"] require.True(t, rok) @@ -162,46 +102,9 @@ 
func TestSetMultiMeta(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(handler)) defer server.Close() store, err := NewHTTPStore(server.URL, "metadata", "json", "key", http.DefaultTransport) - if err != nil { - t.Fatal(err) - } - - store.SetMultiMeta(metas) -} - -func TestPyCryptoRSAPSSCompat(t *testing.T) { - pubPem := "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAnKuXZeefa2LmgxaL5NsM\nzKOHNe+x/nL6ik+lDBCTV6OdcwAhHQS+PONGhrChIUVR6Vth3hUCrreLzPO73Oo5\nVSCuRJ53UronENl6lsa5mFKP8StYLvIDITNvkoT3j52BJIjyNUK9UKY9As2TNqDf\nBEPIRp28ev/NViwGOEkBu2UAbwCIdnDXm8JQErCZA0Ydm7PKGgjLbFsFGrVzqXHK\n6pdzJXlhr9yap3UpgQ/iO9JtoEYB2EXsnSrPc9JRjR30bNHHtnVql3fvinXrAEwq\n3xmN4p+R4VGzfdQN+8Kl/IPjqWB535twhFYEG/B7Ze8IwbygBjK3co/KnOPqMUrM\nBI8ztvPiogz+MvXb8WvarZ6TMTh8ifZI96r7zzqyzjR1hJulEy3IsMGvz8XS2J0X\n7sXoaqszEtXdq5ef5zKVxkiyIQZcbPgmpHLq4MgfdryuVVc/RPASoRIXG4lKaTJj\n1ANMFPxDQpHudCLxwCzjCb+sVa20HBRPTnzo8LSZkI6jAgMBAAE=\n-----END PUBLIC KEY-----" - //privPem := "-----BEGIN RSA PRIVATE 
KEY-----\nMIIG4wIBAAKCAYEAnKuXZeefa2LmgxaL5NsMzKOHNe+x/nL6ik+lDBCTV6OdcwAh\nHQS+PONGhrChIUVR6Vth3hUCrreLzPO73Oo5VSCuRJ53UronENl6lsa5mFKP8StY\nLvIDITNvkoT3j52BJIjyNUK9UKY9As2TNqDfBEPIRp28ev/NViwGOEkBu2UAbwCI\ndnDXm8JQErCZA0Ydm7PKGgjLbFsFGrVzqXHK6pdzJXlhr9yap3UpgQ/iO9JtoEYB\n2EXsnSrPc9JRjR30bNHHtnVql3fvinXrAEwq3xmN4p+R4VGzfdQN+8Kl/IPjqWB5\n35twhFYEG/B7Ze8IwbygBjK3co/KnOPqMUrMBI8ztvPiogz+MvXb8WvarZ6TMTh8\nifZI96r7zzqyzjR1hJulEy3IsMGvz8XS2J0X7sXoaqszEtXdq5ef5zKVxkiyIQZc\nbPgmpHLq4MgfdryuVVc/RPASoRIXG4lKaTJj1ANMFPxDQpHudCLxwCzjCb+sVa20\nHBRPTnzo8LSZkI6jAgMBAAECggGAdzyI7z/HLt2IfoAsXDLynNRgVYZluzgawiU3\ngeUjnnGhpSKWERXJC2IWDPBk0YOGgcnQxErNTdfXiFZ/xfRlSgqjVwob2lRe4w4B\npLr+CZXcgznv1VrPUvdolOSp3R2Mahfn7u0qVDUQ/g8jWVI6KW7FACmQhzQkPM8o\ntLGrpcmK+PA465uaHKtYccEB02ILqrK8v++tknv7eIZczrsSKlS1h/HHjSaidYxP\n2DAUiF7wnChrwwQEvuEUHhwVgQcoDMBoow0zwHdbFiFO2ZT54H2oiJWLhpR/x6RK\ngM1seqoPH2sYErPJACMcYsMtF4Tx7b5c4WSj3vDCGb+jeqnNS6nFC3aMnv75mUS2\nYDPU1heJFd8pNHVf0RDejLZZUiJSnXf3vpOxt9Xv2+4He0jeMfLV7zX0mO2Ni3MJ\nx6PiVy4xerHImOuuHzSla5crOq2ECiAxd1wEOFDRD2LRHzfhpk1ghiA5xA1qwc7Z\neRnkVfoy6PPZ4lZakZTm0p8YCQURAoHBAMUIC/7vnayLae7POmgy+np/ty7iMfyd\nV1eO6LTO21KAaGGlhaY26WD/5LcG2FUgc5jKKahprGrmiNLzLUeQPckJmuijSEVM\nl/4DlRvCo867l7fLaVqYzsQBBdeGIFNiT+FBOd8atff87ZBEfH/rXbDi7METD/VR\n4TdblnCsKYAXEJUdkw3IK7SUGERiQZIwKXrH/Map4ibDrljJ71iCgEureU0DBwcg\nwLftmjGMISoLscdRxeubX5uf/yxtHBJeRwKBwQDLjzHhb4gNGdBHUl4hZPAGCq1V\nLX/GpfoOVObW64Lud+tI6N9GNua5/vWduL7MWWOzDTMZysganhKwsJCY5SqAA9p0\nb6ohusf9i1nUnOa2F2j+weuYPXrTYm+ZrESBBdaEJPuj3R5YHVujrBA9Xe0kVOe3\nne151A+0xJOI3tX9CttIaQAsXR7cMDinkDITw6i7X4olRMPCSixHLW97cDsVDRGt\necO1d4dP3OGscN+vKCoL6tDKDotzWHYPwjH47sUCgcEAoVI8WCiipbKkMnaTsNsE\ngKXvO0DSgq3k5HjLCbdQldUzIbgfnH7bSKNcBYtiNxjR7OihgRW8qO5GWsnmafCs\n1dy6a/2835id3cnbHRaZflvUFhVDFn2E1bCsstFLyFn3Y0w/cO9yzC/X5sZcVXRF\nit3R0Selakv3JZckru4XMJwx5JWJYMBjIIAc+miknWg3niL+UT6pPun65xG3mXWI\nS+yC7c4rw+dKQ44UMLs2MDHRBoxqi8T0W/x9NkfDszpjAoHAclH7S4ZdvC3RIR0L\nLGoJuvroGbwx1JiGdOINuooNwGuswge2zTIsJi0gN/H3hcB2E6rIFiYid4BrMrwW\nmSeq1LZVS6
siu0qw4p4OVy+/CmjfWKQD8j4k6u6PipiK6IMk1JYIlSCr2AS04JjT\njgNgGVVtxVt2cUM9huIXkXjEaRZdzK7boA60NCkIyGJdHWh3LLQdW4zg/A64C0lj\nIMoJBGuQkAKgfRuh7KI6Q6Qom7BM3OCFXdUJUEBQHc2MTyeZAoHAJdBQGBn1RFZ+\nn75AnbTMZJ6Twp2fVjzWUz/+rnXFlo87ynA18MR2BzaDST4Bvda29UBFGb32Mux9\nOHukqLgIE5jDuqWjy4B5eCoxZf/OvwlgXkX9+gprGR3axn/PZBFPbFB4ZmjbWLzn\nbocn7FJCXf+Cm0cMmv1jIIxej19MUU/duq9iq4RkHY2LG+KrSEQIUVmImCftXdN3\n/qNP5JetY0eH6C+KRc8JqDB0nvbqZNOgYXOfYXo/5Gk8XIHTFihm\n-----END RSA PRIVATE KEY-----" - testStr := "The quick brown fox jumps over the lazy dog." - sigHex := "4e05ee9e435653549ac4eddbc43e1a6868636e8ea6dbec2564435afcb0de47e0824cddbd88776ddb20728c53ecc90b5d543d5c37575fda8bd0317025fc07de62ee8084b1a75203b1a23d1ef4ac285da3d1fc63317d5b2cf1aafa3e522acedd366ccd5fe4a7f02a42922237426ca3dc154c57408638b9bfaf0d0213855d4e9ee621db204151bcb13d4dbb18f930ec601469c992c84b14e9e0b6f91ac9517bb3b749dd117e1cbac2e4acb0e549f44558a2005898a226d5b6c8b9291d7abae0d9e0a16858b89662a085f74a202deb867acab792bdbd2c36731217caea8b17bd210c29b890472f11e5afdd1dd7b69004db070e04201778f2c49f5758643881403d45a58d08f51b5c63910c6185892f0b590f191d760b669eff2464456f130239bba94acf54a0cb98f6939ff84ae26a37f9b890be259d9b5d636f6eb367b53e895227d7d79a3a88afd6d28c198ee80f6527437c5fbf63accb81709925c4e03d1c9eaee86f58e4bd1c669d6af042dbd412de0d13b98b1111e2fadbe34b45de52125e9a" - k := data.NewPublicKey(data.RSAKey, []byte(pubPem)) - - sigBytes, err := hex.DecodeString(sigHex) - if err != nil { - t.Fatal(err) - } - v := signed.RSAPyCryptoVerifier{} - err = v.Verify(k, sigBytes, []byte(testStr)) - if err != nil { - t.Fatal(err) - } -} - -func TestPyNaCled25519Compat(t *testing.T) { - pubHex := "846612b43cef909a0e4ea9c818379bca4723a2020619f95e7a0ccc6f0850b7dc" - //privHex := "bf3cdb9b2a664b0460e6755cb689ffca15b6e294f79f9f1fcf90b52e5b063a76" - testStr := "The quick brown fox jumps over the lazy dog." 
- sigHex := "166e7013e48f26dccb4e68fe4cf558d1cd3af902f8395534336a7f8b4c56588694aa3ac671767246298a59d5ef4224f02c854f41bfcfe70241db4be1546d6a00" - - pub, _ := hex.DecodeString(pubHex) - k := data.NewPublicKey(data.ED25519Key, pub) - - sigBytes, _ := hex.DecodeString(sigHex) + require.NoError(t, err) - err := signed.Verifiers[data.EDDSASignature].Verify(k, sigBytes, []byte(testStr)) - if err != nil { - t.Fatal(err) - } + store.SetMulti(metas) } func testErrorCode(t *testing.T, errorCode int, errType error) { @@ -220,7 +123,7 @@ func testErrorCode(t *testing.T, errorCode int, errType error) { ) require.NoError(t, err) - _, err = store.GetMeta("root", 4801) + _, err = store.GetSized("root", 4801) require.Error(t, err) require.IsType(t, errType, err, fmt.Sprintf("%d should translate to %v", errorCode, errType)) diff --git a/storage/interface.go b/storage/interface.go deleted file mode 100644 index 2951e248f..000000000 --- a/storage/interface.go +++ /dev/null @@ -1,8 +0,0 @@ -package storage - -// Bootstrapper is a thing that can set itself up -type Bootstrapper interface { - // Bootstrap instructs a configured Bootstrapper to perform - // its setup operations. 
- Bootstrap() error -} diff --git a/tuf/store/interfaces.go b/storage/interfaces.go similarity index 61% rename from tuf/store/interfaces.go rename to storage/interfaces.go index 8ccec3483..f7813247e 100644 --- a/tuf/store/interfaces.go +++ b/storage/interfaces.go @@ -1,4 +1,4 @@ -package store +package storage // NoSizeLimit is represented as -1 for arguments to GetMeta const NoSizeLimit int64 = -1 @@ -6,11 +6,11 @@ const NoSizeLimit int64 = -1 // MetadataStore must be implemented by anything that intends to interact // with a store of TUF files type MetadataStore interface { - GetMeta(name string, size int64) ([]byte, error) - SetMeta(name string, blob []byte) error - SetMultiMeta(map[string][]byte) error + GetSized(name string, size int64) ([]byte, error) + Set(name string, blob []byte) error + SetMulti(map[string][]byte) error RemoveAll() error - RemoveMeta(name string) error + Remove(name string) error } // PublicKeyStore must be implemented by a key service @@ -18,14 +18,16 @@ type PublicKeyStore interface { GetKey(role string) ([]byte, error) } -// LocalStore represents a local TUF sture -type LocalStore interface { - MetadataStore -} - // RemoteStore is similar to LocalStore with the added expectation that it should // provide a way to download targets once located type RemoteStore interface { MetadataStore PublicKeyStore } + +// Bootstrapper is a thing that can set itself up +type Bootstrapper interface { + // Bootstrap instructs a configured Bootstrapper to perform + // its setup operations. 
+ Bootstrap() error +} diff --git a/tuf/store/memorystore.go b/storage/memorystore.go similarity index 53% rename from tuf/store/memorystore.go rename to storage/memorystore.go index 77c03c719..8a2ade54d 100644 --- a/tuf/store/memorystore.go +++ b/storage/memorystore.go @@ -1,50 +1,46 @@ -package store +package storage import ( "crypto/sha256" - "fmt" "github.com/docker/notary" - "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/utils" ) // NewMemoryStore returns a MetadataStore that operates entirely in memory. // Very useful for testing -func NewMemoryStore(meta map[string][]byte) *MemoryStore { +func NewMemoryStore(initial map[string][]byte) *MemoryStore { var consistent = make(map[string][]byte) - if meta == nil { - meta = make(map[string][]byte) + if initial == nil { + initial = make(map[string][]byte) } else { // add all seed meta to consistent - for name, data := range meta { + for name, data := range initial { checksum := sha256.Sum256(data) path := utils.ConsistentName(name, checksum[:]) consistent[path] = data } } return &MemoryStore{ - meta: meta, + data: initial, consistent: consistent, - keys: make(map[string][]data.PrivateKey), } } // MemoryStore implements a mock RemoteStore entirely in memory. // For testing purposes only. type MemoryStore struct { - meta map[string][]byte + data map[string][]byte consistent map[string][]byte - keys map[string][]data.PrivateKey } -// GetMeta returns up to size bytes of data references by name. +// GetSized returns up to size bytes of data references by name. 
// If size is "NoSizeLimit", this corresponds to "infinite," but we cut off at a // predefined threshold "notary.MaxDownloadSize", as we will always know the // size for everything but a timestamp and sometimes a root, // neither of which should be exceptionally large -func (m *MemoryStore) GetMeta(name string, size int64) ([]byte, error) { - d, ok := m.meta[name] +func (m MemoryStore) GetSized(name string, size int64) ([]byte, error) { + d, ok := m.data[name] if ok { if size == NoSizeLimit { size = notary.MaxDownloadSize @@ -64,9 +60,20 @@ func (m *MemoryStore) GetMeta(name string, size int64) ([]byte, error) { return nil, ErrMetaNotFound{Resource: name} } -// SetMeta sets the metadata value for the given name -func (m *MemoryStore) SetMeta(name string, meta []byte) error { - m.meta[name] = meta +// Get returns the data associated with name +func (m MemoryStore) Get(name string) ([]byte, error) { + if d, ok := m.data[name]; ok { + return d, nil + } + if d, ok := m.consistent[name]; ok { + return d, nil + } + return nil, ErrMetaNotFound{Resource: name} +} + +// Set sets the metadata value for the given name +func (m *MemoryStore) Set(name string, meta []byte) error { + m.data[name] = meta checksum := sha256.Sum256(meta) path := utils.ConsistentName(name, checksum[:]) @@ -74,34 +81,44 @@ func (m *MemoryStore) SetMeta(name string, meta []byte) error { return nil } -// SetMultiMeta sets multiple pieces of metadata for multiple names +// SetMulti sets multiple pieces of metadata for multiple names // in a single operation. 
-func (m *MemoryStore) SetMultiMeta(metas map[string][]byte) error { +func (m *MemoryStore) SetMulti(metas map[string][]byte) error { for role, blob := range metas { - m.SetMeta(role, blob) + m.Set(role, blob) } return nil } -// RemoveMeta removes the metadata for a single role - if the metadata doesn't +// Remove removes the metadata for a single role - if the metadata doesn't // exist, no error is returned -func (m *MemoryStore) RemoveMeta(name string) error { - if meta, ok := m.meta[name]; ok { +func (m *MemoryStore) Remove(name string) error { + if meta, ok := m.data[name]; ok { checksum := sha256.Sum256(meta) path := utils.ConsistentName(name, checksum[:]) - delete(m.meta, name) + delete(m.data, name) delete(m.consistent, path) } return nil } -// GetKey returns the public key for the given role -func (m *MemoryStore) GetKey(role string) ([]byte, error) { - return nil, fmt.Errorf("GetKey is not implemented for the MemoryStore") -} - // RemoveAll clears the existing memory store by setting this store as new empty one func (m *MemoryStore) RemoveAll() error { *m = *NewMemoryStore(nil) return nil } + +// Location provides a human readable name for the storage location +func (m MemoryStore) Location() string { + return "memory" +} + +// ListFiles returns a list of all files. The names returned should be +// usable with Get directly, with no modification. 
+func (m *MemoryStore) ListFiles() []string { + names := make([]string, 0, len(m.data)) + for n := range m.data { + names = append(names, n) + } + return names +} diff --git a/tuf/store/memorystore_test.go b/storage/memorystore_test.go similarity index 64% rename from tuf/store/memorystore_test.go rename to storage/memorystore_test.go index 716bb7bc2..353845fbe 100644 --- a/tuf/store/memorystore_test.go +++ b/storage/memorystore_test.go @@ -1,4 +1,4 @@ -package store +package storage import ( "crypto/sha256" @@ -11,31 +11,31 @@ import ( func TestMemoryStoreMetadataOperations(t *testing.T) { s := NewMemoryStore(nil) - // GetMeta of a non-existent metadata fails - _, err := s.GetMeta("nonexistent", 0) + // GetSized of a non-existent metadata fails + _, err := s.GetSized("nonexistent", 0) require.Error(t, err) require.IsType(t, ErrMetaNotFound{}, err) - // Once SetMeta succeeds, GetMeta with the role name and the consistent name + // Once SetMeta succeeds, GetSized with the role name and the consistent name // should succeed metaContent := []byte("content") metaSize := int64(len(metaContent)) shasum := sha256.Sum256(metaContent) invalidShasum := sha256.Sum256([]byte{}) - require.NoError(t, s.SetMeta("exists", metaContent)) - require.NoError(t, s.SetMultiMeta(map[string][]byte{"multi1": metaContent, "multi2": metaContent})) + require.NoError(t, s.Set("exists", metaContent)) + require.NoError(t, s.SetMulti(map[string][]byte{"multi1": metaContent, "multi2": metaContent})) for _, metaName := range []string{"exists", "multi1", "multi2"} { - meta, err := s.GetMeta(metaName, metaSize) + meta, err := s.GetSized(metaName, metaSize) require.NoError(t, err) require.Equal(t, metaContent, meta) - meta, err = s.GetMeta(utils.ConsistentName(metaName, shasum[:]), metaSize) + meta, err = s.GetSized(utils.ConsistentName(metaName, shasum[:]), metaSize) require.NoError(t, err) require.Equal(t, metaContent, meta) - _, err = s.GetMeta(utils.ConsistentName(metaName, invalidShasum[:]), 
metaSize) + _, err = s.GetSized(utils.ConsistentName(metaName, invalidShasum[:]), metaSize) require.Error(t, err) require.IsType(t, ErrMetaNotFound{}, err) } @@ -44,32 +44,32 @@ func TestMemoryStoreMetadataOperations(t *testing.T) { err = s.RemoveAll() require.NoError(t, err) - _, err = s.GetMeta("exists", 0) + _, err = s.GetSized("exists", 0) require.Error(t, err) require.IsType(t, ErrMetaNotFound{}, err) } -func TestMemoryStoreGetMetaSize(t *testing.T) { +func TestMemoryStoreGetSized(t *testing.T) { content := []byte("content") s := NewMemoryStore(map[string][]byte{"content": content}) // we can get partial size - meta, err := s.GetMeta("content", 3) + meta, err := s.GetSized("content", 3) require.NoError(t, err) require.Equal(t, []byte("con"), meta) // we can get zero size - meta, err = s.GetMeta("content", 0) + meta, err = s.GetSized("content", 0) require.NoError(t, err) require.Equal(t, []byte{}, meta) // we can get the whole thing by passing NoSizeLimit (-1) - meta, err = s.GetMeta("content", NoSizeLimit) + meta, err = s.GetSized("content", NoSizeLimit) require.NoError(t, err) require.Equal(t, content, meta) // a size much larger than the actual length will return the whole thing - meta, err = s.GetMeta("content", 8000) + meta, err = s.GetSized("content", 8000) require.NoError(t, err) require.Equal(t, content, meta) } diff --git a/tuf/store/offlinestore.go b/storage/offlinestore.go similarity index 52% rename from tuf/store/offlinestore.go rename to storage/offlinestore.go index b0f057b2b..a9433c1ad 100644 --- a/tuf/store/offlinestore.go +++ b/storage/offlinestore.go @@ -1,8 +1,4 @@ -package store - -import ( - "io" -) +package storage // ErrOffline is used to indicate we are operating offline type ErrOffline struct{} @@ -17,23 +13,23 @@ var err = ErrOffline{} // returns ErrOffline for every operation type OfflineStore struct{} -// GetMeta returns ErrOffline -func (es OfflineStore) GetMeta(name string, size int64) ([]byte, error) { +// GetSized returns 
ErrOffline +func (es OfflineStore) GetSized(name string, size int64) ([]byte, error) { return nil, err } -// SetMeta returns ErrOffline -func (es OfflineStore) SetMeta(name string, blob []byte) error { +// Set returns ErrOffline +func (es OfflineStore) Set(name string, blob []byte) error { return err } -// SetMultiMeta returns ErrOffline -func (es OfflineStore) SetMultiMeta(map[string][]byte) error { +// SetMulti returns ErrOffline +func (es OfflineStore) SetMulti(map[string][]byte) error { return err } -// RemoveMeta returns ErrOffline -func (es OfflineStore) RemoveMeta(name string) error { +// Remove returns ErrOffline +func (es OfflineStore) Remove(name string) error { return err } @@ -42,12 +38,12 @@ func (es OfflineStore) GetKey(role string) ([]byte, error) { return nil, err } -// GetTarget returns ErrOffline -func (es OfflineStore) GetTarget(path string) (io.ReadCloser, error) { - return nil, err -} - // RemoveAll return ErrOffline func (es OfflineStore) RemoveAll() error { return err } + +// Location returns a human readable name for the storage location +func (es OfflineStore) Location() string { + return "offline" +} diff --git a/tuf/store/offlinestore_test.go b/storage/offlinestore_test.go similarity index 74% rename from tuf/store/offlinestore_test.go rename to storage/offlinestore_test.go index 66de915e7..659211e42 100644 --- a/tuf/store/offlinestore_test.go +++ b/storage/offlinestore_test.go @@ -1,4 +1,4 @@ -package store +package storage import ( "testing" @@ -8,15 +8,15 @@ import ( func TestOfflineStore(t *testing.T) { s := OfflineStore{} - _, err := s.GetMeta("", 0) + _, err := s.GetSized("", 0) require.Error(t, err) require.IsType(t, ErrOffline{}, err) - err = s.SetMeta("", nil) + err = s.Set("", nil) require.Error(t, err) require.IsType(t, ErrOffline{}, err) - err = s.SetMultiMeta(nil) + err = s.SetMulti(nil) require.Error(t, err) require.IsType(t, ErrOffline{}, err) @@ -24,10 +24,6 @@ func TestOfflineStore(t *testing.T) { require.Error(t, err) 
require.IsType(t, ErrOffline{}, err) - _, err = s.GetTarget("") - require.Error(t, err) - require.IsType(t, ErrOffline{}, err) - err = s.RemoveAll() require.Error(t, err) require.IsType(t, ErrOffline{}, err) diff --git a/tuf/store/store_test.go b/storage/store_test.go similarity index 66% rename from tuf/store/store_test.go rename to storage/store_test.go index 6ce9be12d..a1dc6b309 100644 --- a/tuf/store/store_test.go +++ b/storage/store_test.go @@ -1,4 +1,4 @@ -package store +package storage import ( "testing" @@ -11,32 +11,32 @@ type storeFactory func() MetadataStore // Verifies that the metadata store can get and set metadata func testGetSetMeta(t *testing.T, factory storeFactory) { s := factory() - metaBytes, err := s.GetMeta("root", 300) + metaBytes, err := s.GetSized("root", 300) require.Error(t, err) require.Nil(t, metaBytes) require.IsType(t, ErrMetaNotFound{}, err) content := []byte("root bytes") - require.NoError(t, s.SetMeta("root", content)) + require.NoError(t, s.Set("root", content)) - metaBytes, err = s.GetMeta("root", 300) + metaBytes, err = s.GetSized("root", 300) require.NoError(t, err) require.Equal(t, content, metaBytes) } // Verifies that the metadata store can delete metadata -func testRemoveMeta(t *testing.T, factory storeFactory) { +func testRemove(t *testing.T, factory storeFactory) { s := factory() - require.NoError(t, s.SetMeta("root", []byte("test data"))) + require.NoError(t, s.Set("root", []byte("test data"))) - require.NoError(t, s.RemoveMeta("root")) - _, err := s.GetMeta("root", 300) + require.NoError(t, s.Remove("root")) + _, err := s.GetSized("root", 300) require.Error(t, err) require.IsType(t, ErrMetaNotFound{}, err) // delete metadata should be successful even if the metadata doesn't exist - require.NoError(t, s.RemoveMeta("root")) + require.NoError(t, s.Remove("root")) } func TestMemoryStoreMetadata(t *testing.T) { @@ -45,5 +45,5 @@ func TestMemoryStoreMetadata(t *testing.T) { } testGetSetMeta(t, factory) - testRemoveMeta(t, 
factory) + testRemove(t, factory) } diff --git a/trustmanager/filestore.go b/trustmanager/filestore.go deleted file mode 100644 index 7927413a1..000000000 --- a/trustmanager/filestore.go +++ /dev/null @@ -1,150 +0,0 @@ -package trustmanager - -import ( - "fmt" - "io/ioutil" - "os" - "path/filepath" - "strings" -) - -// SimpleFileStore implements FileStore -type SimpleFileStore struct { - baseDir string - fileExt string - perms os.FileMode -} - -// NewFileStore creates a fully configurable file store -func NewFileStore(baseDir, fileExt string, perms os.FileMode) (*SimpleFileStore, error) { - baseDir = filepath.Clean(baseDir) - if err := createDirectory(baseDir, perms); err != nil { - return nil, err - } - if !strings.HasPrefix(fileExt, ".") { - fileExt = "." + fileExt - } - - return &SimpleFileStore{ - baseDir: baseDir, - fileExt: fileExt, - perms: perms, - }, nil -} - -// NewSimpleFileStore is a convenience wrapper to create a world readable, -// owner writeable filestore -func NewSimpleFileStore(baseDir, fileExt string) (*SimpleFileStore, error) { - return NewFileStore(baseDir, fileExt, visible) -} - -// NewPrivateSimpleFileStore is a wrapper to create an owner readable/writeable -// _only_ filestore -func NewPrivateSimpleFileStore(baseDir, fileExt string) (*SimpleFileStore, error) { - return NewFileStore(baseDir, fileExt, private) -} - -// Add writes data to a file with a given name -func (f *SimpleFileStore) Add(name string, data []byte) error { - filePath, err := f.GetPath(name) - if err != nil { - return err - } - createDirectory(filepath.Dir(filePath), f.perms) - return ioutil.WriteFile(filePath, data, f.perms) -} - -// Remove removes a file identified by name -func (f *SimpleFileStore) Remove(name string) error { - // Attempt to remove - filePath, err := f.GetPath(name) - if err != nil { - return err - } - return os.Remove(filePath) -} - -// Get returns the data given a file name -func (f *SimpleFileStore) Get(name string) ([]byte, error) { - filePath, err 
:= f.GetPath(name) - if err != nil { - return nil, err - } - data, err := ioutil.ReadFile(filePath) - if err != nil { - return nil, err - } - - return data, nil -} - -// GetPath returns the full final path of a file with a given name -func (f *SimpleFileStore) GetPath(name string) (string, error) { - fileName := f.genFileName(name) - fullPath := filepath.Clean(filepath.Join(f.baseDir, fileName)) - - if !strings.HasPrefix(fullPath, f.baseDir) { - return "", ErrPathOutsideStore - } - return fullPath, nil -} - -// ListFiles lists all the files inside of a store -func (f *SimpleFileStore) ListFiles() []string { - return f.list(f.baseDir) -} - -// list lists all the files in a directory given a full path. Ignores symlinks. -func (f *SimpleFileStore) list(path string) []string { - files := make([]string, 0, 0) - filepath.Walk(path, func(fp string, fi os.FileInfo, err error) error { - // If there are errors, ignore this particular file - if err != nil { - return nil - } - // Ignore if it is a directory - if fi.IsDir() { - return nil - } - - // If this is a symlink, ignore it - if fi.Mode()&os.ModeSymlink == os.ModeSymlink { - return nil - } - - // Only allow matches that end with our certificate extension (e.g. *.crt) - matched, _ := filepath.Match("*"+f.fileExt, fi.Name()) - - if matched { - // Find the relative path for this file relative to the base path. - fp, err = filepath.Rel(path, fp) - if err != nil { - return err - } - trimmed := strings.TrimSuffix(fp, f.fileExt) - files = append(files, trimmed) - } - return nil - }) - return files -} - -// genFileName returns the name using the right extension -func (f *SimpleFileStore) genFileName(name string) string { - return fmt.Sprintf("%s%s", name, f.fileExt) -} - -// BaseDir returns the base directory of the filestore -func (f *SimpleFileStore) BaseDir() string { - return f.baseDir -} - -// createDirectory receives a string of the path to a directory. 
-// It does not support passing files, so the caller has to remove -// the filename by doing filepath.Dir(full_path_to_file) -func createDirectory(dir string, perms os.FileMode) error { - // This prevents someone passing /path/to/dir and 'dir' not being created - // If two '//' exist, MkdirAll deals it with correctly - dir = dir + "/" - return os.MkdirAll(dir, perms) -} diff --git a/trustmanager/interfaces.go b/trustmanager/interfaces.go new file mode 100644 index 000000000..2611d436a --- /dev/null +++ b/trustmanager/interfaces.go @@ -0,0 +1,86 @@ +package trustmanager + +import ( + "fmt" + + "github.com/docker/notary/tuf/data" +) + +// Storage implements the bare bones primitives (no hierarchy) +type Storage interface { + // Add writes a file to the specified location, returning an error if this + // is not possible (reasons may include permissions errors). The path is cleaned + // before being made absolute against the store's base dir. + Set(fileName string, data []byte) error + + // Remove deletes a file from the store relative to the store's base directory. + // The path is cleaned before being made absolute to ensure no path traversal + // outside the base directory is possible. + Remove(fileName string) error + + // Get returns the file content found at fileName relative to the base directory + // of the file store. The path is cleaned before being made absolute to ensure + // path traversal outside the store is not possible. If the file is not found + // an error to that effect is returned. + Get(fileName string) ([]byte, error) + + // ListFiles returns a list of paths relative to the base directory of the + // filestore. Any of these paths must be retrievable via the + // Storage.Get method. 
+ ListFiles() []string + + // Location returns a human readable name indicating where the implementer + // is storing keys + Location() string +} + +// ErrAttemptsExceeded is returned when too many attempts have been made to decrypt a key +type ErrAttemptsExceeded struct{} + +// ErrAttemptsExceeded is returned when too many attempts have been made to decrypt a key +func (err ErrAttemptsExceeded) Error() string { + return "maximum number of passphrase attempts exceeded" +} + +// ErrPasswordInvalid is returned when signing fails. It could also mean the signing +// key file was corrupted, but we have no way to distinguish. +type ErrPasswordInvalid struct{} + +// ErrPasswordInvalid is returned when signing fails. It could also mean the signing +// key file was corrupted, but we have no way to distinguish. +func (err ErrPasswordInvalid) Error() string { + return "password invalid, operation has failed." +} + +// ErrKeyNotFound is returned when the keystore fails to retrieve a specific key. +type ErrKeyNotFound struct { + KeyID string +} + +// ErrKeyNotFound is returned when the keystore fails to retrieve a specific key. +func (err ErrKeyNotFound) Error() string { + return fmt.Sprintf("signing key not found: %s", err.KeyID) +} + +const ( + keyExtension = "key" +) + +// KeyStore is a generic interface for private key storage +type KeyStore interface { + // AddKey adds a key to the KeyStore, and if the key already exists, + // succeeds. Otherwise, returns an error if it cannot add. + AddKey(keyInfo KeyInfo, privKey data.PrivateKey) error + // Should fail with ErrKeyNotFound if the keystore is operating normally + // and knows that it does not store the requested key. 
+ GetKey(keyID string) (data.PrivateKey, string, error) + GetKeyInfo(keyID string) (KeyInfo, error) + ListKeys() map[string]KeyInfo + RemoveKey(keyID string) error + Name() string +} + +type cachedKey struct { + alias string + key data.PrivateKey +} diff --git a/trustmanager/keyfilestore.go b/trustmanager/keyfilestore.go deleted file mode 100644 index c82af3e86..000000000 --- a/trustmanager/keyfilestore.go +++ /dev/null @@ -1,476 +0,0 @@ -package trustmanager - -import ( - "encoding/pem" - "fmt" - "path/filepath" - "strings" - "sync" - - "github.com/Sirupsen/logrus" - "github.com/docker/notary" - "github.com/docker/notary/tuf/data" -) - -type keyInfoMap map[string]KeyInfo - -// KeyFileStore persists and manages private keys on disk -type KeyFileStore struct { - sync.Mutex - SimpleFileStore - notary.PassRetriever - cachedKeys map[string]*cachedKey - keyInfoMap -} - -// KeyMemoryStore manages private keys in memory -type KeyMemoryStore struct { - sync.Mutex - MemoryFileStore - notary.PassRetriever - cachedKeys map[string]*cachedKey - keyInfoMap -} - -// KeyInfo stores the role, path, and gun for a corresponding private key ID -// It is assumed that each private key ID is unique -type KeyInfo struct { - Gun string - Role string -} - -// NewKeyFileStore returns a new KeyFileStore creating a private directory to -// hold the keys. 
-func NewKeyFileStore(baseDir string, passphraseRetriever notary.PassRetriever) (*KeyFileStore, error) { - baseDir = filepath.Join(baseDir, notary.PrivDir) - fileStore, err := NewPrivateSimpleFileStore(baseDir, keyExtension) - if err != nil { - return nil, err - } - cachedKeys := make(map[string]*cachedKey) - keyInfoMap := make(keyInfoMap) - - keyStore := &KeyFileStore{SimpleFileStore: *fileStore, - PassRetriever: passphraseRetriever, - cachedKeys: cachedKeys, - keyInfoMap: keyInfoMap, - } - - // Load this keystore's ID --> gun/role map - keyStore.loadKeyInfo() - return keyStore, nil -} - -func generateKeyInfoMap(s Storage) map[string]KeyInfo { - keyInfoMap := make(map[string]KeyInfo) - for _, keyPath := range s.ListFiles() { - d, err := s.Get(keyPath) - if err != nil { - logrus.Error(err) - continue - } - keyID, keyInfo, err := KeyInfoFromPEM(d, keyPath) - if err != nil { - logrus.Error(err) - continue - } - keyInfoMap[keyID] = keyInfo - } - return keyInfoMap -} - -// Attempts to infer the keyID, role, and GUN from the specified key path. 
-// Note that non-root roles can only be inferred if this is a legacy style filename: KEYID_ROLE.key -func inferKeyInfoFromKeyPath(keyPath string) (string, string, string) { - var keyID, role, gun string - keyID = filepath.Base(keyPath) - underscoreIndex := strings.LastIndex(keyID, "_") - - // This is the legacy KEYID_ROLE filename - // The keyID is the first part of the keyname - // The keyRole is the second part of the keyname - // in a key named abcde_root, abcde is the keyID and root is the KeyAlias - if underscoreIndex != -1 { - role = keyID[underscoreIndex+1:] - keyID = keyID[:underscoreIndex] - } - - if filepath.HasPrefix(keyPath, notary.RootKeysSubdir+"/") { - return keyID, data.CanonicalRootRole, "" - } - - keyPath = strings.TrimPrefix(keyPath, notary.NonRootKeysSubdir+"/") - gun = getGunFromFullID(keyPath) - return keyID, role, gun -} - -func getGunFromFullID(fullKeyID string) string { - keyGun := filepath.Dir(fullKeyID) - // If the gun is empty, Dir will return . - if keyGun == "." 
{ - keyGun = "" - } - return keyGun -} - -func (s *KeyFileStore) loadKeyInfo() { - s.keyInfoMap = generateKeyInfoMap(s) -} - -func (s *KeyMemoryStore) loadKeyInfo() { - s.keyInfoMap = generateKeyInfoMap(s) -} - -// GetKeyInfo returns the corresponding gun and role key info for a keyID -func (s *KeyFileStore) GetKeyInfo(keyID string) (KeyInfo, error) { - if info, ok := s.keyInfoMap[keyID]; ok { - return info, nil - } - return KeyInfo{}, fmt.Errorf("Could not find info for keyID %s", keyID) -} - -// GetKeyInfo returns the corresponding gun and role key info for a keyID -func (s *KeyMemoryStore) GetKeyInfo(keyID string) (KeyInfo, error) { - if info, ok := s.keyInfoMap[keyID]; ok { - return info, nil - } - return KeyInfo{}, fmt.Errorf("Could not find info for keyID %s", keyID) -} - -// Name returns a user friendly name for the location this store -// keeps its data -func (s *KeyFileStore) Name() string { - return fmt.Sprintf("file (%s)", s.SimpleFileStore.BaseDir()) -} - -// AddKey stores the contents of a PEM-encoded private key as a PEM block -func (s *KeyFileStore) AddKey(keyInfo KeyInfo, privKey data.PrivateKey) error { - s.Lock() - defer s.Unlock() - if keyInfo.Role == data.CanonicalRootRole || data.IsDelegation(keyInfo.Role) || !data.ValidRole(keyInfo.Role) { - keyInfo.Gun = "" - } - err := addKey(s, s.PassRetriever, s.cachedKeys, filepath.Join(keyInfo.Gun, privKey.ID()), keyInfo.Role, privKey) - if err != nil { - return err - } - s.keyInfoMap[privKey.ID()] = keyInfo - return nil -} - -// GetKey returns the PrivateKey given a KeyID -func (s *KeyFileStore) GetKey(name string) (data.PrivateKey, string, error) { - s.Lock() - defer s.Unlock() - // If this is a bare key ID without the gun, prepend the gun so the filestore lookup succeeds - if keyInfo, ok := s.keyInfoMap[name]; ok { - name = filepath.Join(keyInfo.Gun, name) - } - return getKey(s, s.PassRetriever, s.cachedKeys, name) -} - -// ListKeys returns a list of unique PublicKeys present on the KeyFileStore, by 
returning a copy of the keyInfoMap -func (s *KeyFileStore) ListKeys() map[string]KeyInfo { - return copyKeyInfoMap(s.keyInfoMap) -} - -// RemoveKey removes the key from the keyfilestore -func (s *KeyFileStore) RemoveKey(keyID string) error { - s.Lock() - defer s.Unlock() - // If this is a bare key ID without the gun, prepend the gun so the filestore lookup succeeds - if keyInfo, ok := s.keyInfoMap[keyID]; ok { - keyID = filepath.Join(keyInfo.Gun, keyID) - } - err := removeKey(s, s.cachedKeys, keyID) - if err != nil { - return err - } - // Remove this key from our keyInfo map if we removed from our filesystem - delete(s.keyInfoMap, filepath.Base(keyID)) - return nil -} - -// NewKeyMemoryStore returns a new KeyMemoryStore which holds keys in memory -func NewKeyMemoryStore(passphraseRetriever notary.PassRetriever) *KeyMemoryStore { - memStore := NewMemoryFileStore() - cachedKeys := make(map[string]*cachedKey) - - keyInfoMap := make(keyInfoMap) - - keyStore := &KeyMemoryStore{ - MemoryFileStore: *memStore, - PassRetriever: passphraseRetriever, - cachedKeys: cachedKeys, - keyInfoMap: keyInfoMap, - } - - // Load this keystore's ID --> gun/role map - keyStore.loadKeyInfo() - return keyStore -} - -// Name returns a user friendly name for the location this store -// keeps its data -func (s *KeyMemoryStore) Name() string { - return "memory" -} - -// AddKey stores the contents of a PEM-encoded private key as a PEM block -func (s *KeyMemoryStore) AddKey(keyInfo KeyInfo, privKey data.PrivateKey) error { - s.Lock() - defer s.Unlock() - if keyInfo.Role == data.CanonicalRootRole || data.IsDelegation(keyInfo.Role) || !data.ValidRole(keyInfo.Role) { - keyInfo.Gun = "" - } - err := addKey(s, s.PassRetriever, s.cachedKeys, filepath.Join(keyInfo.Gun, privKey.ID()), keyInfo.Role, privKey) - if err != nil { - return err - } - s.keyInfoMap[privKey.ID()] = keyInfo - return nil -} - -// GetKey returns the PrivateKey given a KeyID -func (s *KeyMemoryStore) GetKey(name string) 
(data.PrivateKey, string, error) { - s.Lock() - defer s.Unlock() - // If this is a bare key ID without the gun, prepend the gun so the filestore lookup succeeds - if keyInfo, ok := s.keyInfoMap[name]; ok { - name = filepath.Join(keyInfo.Gun, name) - } - return getKey(s, s.PassRetriever, s.cachedKeys, name) -} - -// ListKeys returns a list of unique PublicKeys present on the KeyFileStore, by returning a copy of the keyInfoMap -func (s *KeyMemoryStore) ListKeys() map[string]KeyInfo { - return copyKeyInfoMap(s.keyInfoMap) -} - -// copyKeyInfoMap returns a deep copy of the passed-in keyInfoMap -func copyKeyInfoMap(keyInfoMap map[string]KeyInfo) map[string]KeyInfo { - copyMap := make(map[string]KeyInfo) - for keyID, keyInfo := range keyInfoMap { - copyMap[keyID] = KeyInfo{Role: keyInfo.Role, Gun: keyInfo.Gun} - } - return copyMap -} - -// RemoveKey removes the key from the keystore -func (s *KeyMemoryStore) RemoveKey(keyID string) error { - s.Lock() - defer s.Unlock() - // If this is a bare key ID without the gun, prepend the gun so the filestore lookup succeeds - if keyInfo, ok := s.keyInfoMap[keyID]; ok { - keyID = filepath.Join(keyInfo.Gun, keyID) - } - err := removeKey(s, s.cachedKeys, keyID) - if err != nil { - return err - } - // Remove this key from our keyInfo map if we removed from our filesystem - delete(s.keyInfoMap, filepath.Base(keyID)) - return nil -} - -// KeyInfoFromPEM attempts to get a keyID and KeyInfo from the filename and PEM bytes of a key -func KeyInfoFromPEM(pemBytes []byte, filename string) (string, KeyInfo, error) { - keyID, role, gun := inferKeyInfoFromKeyPath(filename) - if role == "" { - block, _ := pem.Decode(pemBytes) - if block == nil { - return "", KeyInfo{}, fmt.Errorf("could not decode PEM block for key %s", filename) - } - if keyRole, ok := block.Headers["role"]; ok { - role = keyRole - } - } - return keyID, KeyInfo{Gun: gun, Role: role}, nil -} - -func addKey(s Storage, passphraseRetriever notary.PassRetriever, cachedKeys 
map[string]*cachedKey, name, role string, privKey data.PrivateKey) error { - - var ( - chosenPassphrase string - giveup bool - err error - ) - - for attempts := 0; ; attempts++ { - chosenPassphrase, giveup, err = passphraseRetriever(name, role, true, attempts) - if err != nil { - continue - } - if giveup { - return ErrAttemptsExceeded{} - } - if attempts > 10 { - return ErrAttemptsExceeded{} - } - break - } - - return encryptAndAddKey(s, chosenPassphrase, cachedKeys, name, role, privKey) -} - -// getKeyRole finds the role for the given keyID. It attempts to look -// both in the newer format PEM headers, and also in the legacy filename -// format. It returns: the role, whether it was found in the legacy format -// (true == legacy), and an error -func getKeyRole(s Storage, keyID string) (string, bool, error) { - name := strings.TrimSpace(strings.TrimSuffix(filepath.Base(keyID), filepath.Ext(keyID))) - - for _, file := range s.ListFiles() { - filename := filepath.Base(file) - - if strings.HasPrefix(filename, name) { - d, err := s.Get(file) - if err != nil { - return "", false, err - } - block, _ := pem.Decode(d) - if block != nil { - if role, ok := block.Headers["role"]; ok { - return role, false, nil - } - } - - role := strings.TrimPrefix(filename, name+"_") - return role, true, nil - } - } - - return "", false, ErrKeyNotFound{KeyID: keyID} -} - -// GetKey returns the PrivateKey given a KeyID -func getKey(s Storage, passphraseRetriever notary.PassRetriever, cachedKeys map[string]*cachedKey, name string) (data.PrivateKey, string, error) { - cachedKeyEntry, ok := cachedKeys[name] - if ok { - return cachedKeyEntry.key, cachedKeyEntry.alias, nil - } - - keyBytes, keyAlias, err := getRawKey(s, name) - if err != nil { - return nil, "", err - } - - // See if the key is encrypted. 
If its encrypted we'll fail to parse the private key - privKey, err := ParsePEMPrivateKey(keyBytes, "") - if err != nil { - privKey, _, err = GetPasswdDecryptBytes(passphraseRetriever, keyBytes, name, string(keyAlias)) - if err != nil { - return nil, "", err - } - } - cachedKeys[name] = &cachedKey{alias: keyAlias, key: privKey} - return privKey, keyAlias, nil -} - -// RemoveKey removes the key from the keyfilestore -func removeKey(s Storage, cachedKeys map[string]*cachedKey, name string) error { - role, legacy, err := getKeyRole(s, name) - if err != nil { - return err - } - - delete(cachedKeys, name) - - if legacy { - name = name + "_" + role - } - - // being in a subdirectory is for backwards compatibliity - err = s.Remove(filepath.Join(getSubdir(role), name)) - if err != nil { - return err - } - return nil -} - -// Assumes 2 subdirectories, 1 containing root keys and 1 containing TUF keys -func getSubdir(alias string) string { - if alias == data.CanonicalRootRole { - return notary.RootKeysSubdir - } - return notary.NonRootKeysSubdir -} - -// Given a key ID, gets the bytes and alias belonging to that key if the key -// exists -func getRawKey(s Storage, name string) ([]byte, string, error) { - role, legacy, err := getKeyRole(s, name) - if err != nil { - return nil, "", err - } - - if legacy { - name = name + "_" + role - } - - var keyBytes []byte - keyBytes, err = s.Get(filepath.Join(getSubdir(role), name)) - if err != nil { - return nil, "", err - } - return keyBytes, role, nil -} - -// GetPasswdDecryptBytes gets the password to decrypt the given pem bytes. 
-// Returns the password and private key -func GetPasswdDecryptBytes(passphraseRetriever notary.PassRetriever, pemBytes []byte, name, alias string) (data.PrivateKey, string, error) { - var ( - passwd string - retErr error - privKey data.PrivateKey - ) - for attempts := 0; ; attempts++ { - var ( - giveup bool - err error - ) - passwd, giveup, err = passphraseRetriever(name, alias, false, attempts) - // Check if the passphrase retriever got an error or if it is telling us to give up - if giveup || err != nil { - return nil, "", ErrPasswordInvalid{} - } - if attempts > 10 { - return nil, "", ErrAttemptsExceeded{} - } - - // Try to convert PEM encoded bytes back to a PrivateKey using the passphrase - privKey, err = ParsePEMPrivateKey(pemBytes, passwd) - if err != nil { - retErr = ErrPasswordInvalid{} - } else { - // We managed to parse the PrivateKey. We've succeeded! - retErr = nil - break - } - } - if retErr != nil { - return nil, "", retErr - } - return privKey, passwd, nil -} - -func encryptAndAddKey(s Storage, passwd string, cachedKeys map[string]*cachedKey, name, role string, privKey data.PrivateKey) error { - - var ( - pemPrivKey []byte - err error - ) - - if passwd != "" { - pemPrivKey, err = EncryptPrivateKey(privKey, role, passwd) - } else { - pemPrivKey, err = KeyToPEM(privKey, role) - } - - if err != nil { - return err - } - - cachedKeys[name] = &cachedKey{alias: role, key: privKey} - return s.Add(filepath.Join(getSubdir(role), name), pemPrivKey) -} diff --git a/trustmanager/keystore.go b/trustmanager/keystore.go index e620aa453..c57d28f44 100644 --- a/trustmanager/keystore.go +++ b/trustmanager/keystore.go @@ -1,58 +1,355 @@ package trustmanager import ( + "encoding/pem" "fmt" + "path/filepath" + "strings" + "sync" + "github.com/Sirupsen/logrus" + "github.com/docker/notary" + store "github.com/docker/notary/storage" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" ) -// ErrAttemptsExceeded is returned when too many 
attempts have been made to decrypt a key -type ErrAttemptsExceeded struct{} +type keyInfoMap map[string]KeyInfo -// ErrAttemptsExceeded is returned when too many attempts have been made to decrypt a key -func (err ErrAttemptsExceeded) Error() string { - return "maximum number of passphrase attempts exceeded" +// KeyInfo stores the role, path, and gun for a corresponding private key ID +// It is assumed that each private key ID is unique +type KeyInfo struct { + Gun string + Role string } -// ErrPasswordInvalid is returned when signing fails. It could also mean the signing -// key file was corrupted, but we have no way to distinguish. -type ErrPasswordInvalid struct{} +// GenericKeyStore is a wrapper for Storage instances that provides +// translation between the []byte form and Public/PrivateKey objects +type GenericKeyStore struct { + store Storage + sync.Mutex + notary.PassRetriever + cachedKeys map[string]*cachedKey + keyInfoMap +} -// ErrPasswordInvalid is returned when signing fails. It could also mean the signing -// key file was corrupted, but we have no way to distinguish. -func (err ErrPasswordInvalid) Error() string { - return "password invalid, operation has failed." +// NewKeyFileStore returns a new KeyFileStore creating a private directory to +// hold the keys. +func NewKeyFileStore(baseDir string, p notary.PassRetriever) (*GenericKeyStore, error) { + baseDir = filepath.Join(baseDir, notary.PrivDir) + fileStore, err := store.NewPrivateSimpleFileStore(baseDir, keyExtension) + if err != nil { + return nil, err + } + return NewGenericKeyStore(fileStore, p), nil } -// ErrKeyNotFound is returned when the keystore fails to retrieve a specific key. 
-type ErrKeyNotFound struct { - KeyID string +// NewKeyMemoryStore returns a new KeyMemoryStore which holds keys in memory +func NewKeyMemoryStore(p notary.PassRetriever) *GenericKeyStore { + memStore := store.NewMemoryStore(nil) + return NewGenericKeyStore(memStore, p) } -// ErrKeyNotFound is returned when the keystore fails to retrieve a specific key. -func (err ErrKeyNotFound) Error() string { - return fmt.Sprintf("signing key not found: %s", err.KeyID) +// NewGenericKeyStore creates a GenericKeyStore wrapping the provided +// Storage instance, using the PassRetriever to enc/decrypt keys +func NewGenericKeyStore(s Storage, p notary.PassRetriever) *GenericKeyStore { + ks := GenericKeyStore{ + store: s, + PassRetriever: p, + cachedKeys: make(map[string]*cachedKey), + keyInfoMap: make(keyInfoMap), + } + ks.loadKeyInfo() + return &ks } -const ( - keyExtension = "key" -) +func generateKeyInfoMap(s Storage) map[string]KeyInfo { + keyInfoMap := make(map[string]KeyInfo) + for _, keyPath := range s.ListFiles() { + d, err := s.Get(keyPath) + if err != nil { + logrus.Error(err) + continue + } + keyID, keyInfo, err := KeyInfoFromPEM(d, keyPath) + if err != nil { + logrus.Error(err) + continue + } + keyInfoMap[keyID] = keyInfo + } + return keyInfoMap +} + +// Attempts to infer the keyID, role, and GUN from the specified key path. 
+// Note that non-root roles can only be inferred if this is a legacy style filename: KEYID_ROLE.key +func inferKeyInfoFromKeyPath(keyPath string) (string, string, string) { + var keyID, role, gun string + keyID = filepath.Base(keyPath) + underscoreIndex := strings.LastIndex(keyID, "_") + + // This is the legacy KEYID_ROLE filename + // The keyID is the first part of the keyname + // The keyRole is the second part of the keyname + // in a key named abcde_root, abcde is the keyID and root is the KeyAlias + if underscoreIndex != -1 { + role = keyID[underscoreIndex+1:] + keyID = keyID[:underscoreIndex] + } + + if filepath.HasPrefix(keyPath, notary.RootKeysSubdir+"/") { + return keyID, data.CanonicalRootRole, "" + } + + keyPath = strings.TrimPrefix(keyPath, notary.NonRootKeysSubdir+"/") + gun = getGunFromFullID(keyPath) + return keyID, role, gun +} + +func getGunFromFullID(fullKeyID string) string { + keyGun := filepath.Dir(fullKeyID) + // If the gun is empty, Dir will return . + if keyGun == "." 
{ + keyGun = "" + } + return keyGun +} + +func (s *GenericKeyStore) loadKeyInfo() { + s.keyInfoMap = generateKeyInfoMap(s.store) +} + +// GetKeyInfo returns the corresponding gun and role key info for a keyID +func (s *GenericKeyStore) GetKeyInfo(keyID string) (KeyInfo, error) { + if info, ok := s.keyInfoMap[keyID]; ok { + return info, nil + } + return KeyInfo{}, fmt.Errorf("Could not find info for keyID %s", keyID) +} + +// AddKey stores the contents of a PEM-encoded private key as a PEM block +func (s *GenericKeyStore) AddKey(keyInfo KeyInfo, privKey data.PrivateKey) error { + var ( + chosenPassphrase string + giveup bool + err error + pemPrivKey []byte + ) + s.Lock() + defer s.Unlock() + if keyInfo.Role == data.CanonicalRootRole || data.IsDelegation(keyInfo.Role) || !data.ValidRole(keyInfo.Role) { + keyInfo.Gun = "" + } + name := filepath.Join(keyInfo.Gun, privKey.ID()) + for attempts := 0; ; attempts++ { + chosenPassphrase, giveup, err = s.PassRetriever(name, keyInfo.Role, true, attempts) + if err != nil { + continue + } + if giveup || attempts > 10 { + return ErrAttemptsExceeded{} + } + break + } + + if chosenPassphrase != "" { + pemPrivKey, err = utils.EncryptPrivateKey(privKey, keyInfo.Role, chosenPassphrase) + } else { + pemPrivKey, err = utils.KeyToPEM(privKey, keyInfo.Role) + } + + if err != nil { + return err + } + + s.cachedKeys[name] = &cachedKey{alias: keyInfo.Role, key: privKey} + err = s.store.Set(filepath.Join(getSubdir(keyInfo.Role), name), pemPrivKey) + if err != nil { + return err + } + s.keyInfoMap[privKey.ID()] = keyInfo + return nil +} + +// GetKey returns the PrivateKey given a KeyID +func (s *GenericKeyStore) GetKey(name string) (data.PrivateKey, string, error) { + s.Lock() + defer s.Unlock() + // If this is a bare key ID without the gun, prepend the gun so the filestore lookup succeeds + if keyInfo, ok := s.keyInfoMap[name]; ok { + name = filepath.Join(keyInfo.Gun, name) + } + + cachedKeyEntry, ok := s.cachedKeys[name] + if ok { + return 
cachedKeyEntry.key, cachedKeyEntry.alias, nil
+	}
+
+	keyAlias, legacy, err := getKeyRole(s.store, name)
+	if err != nil {
+		return nil, "", err
+	}
+
+	if legacy {
+		name = name + "_" + keyAlias
+	}
+
+	keyBytes, err := s.store.Get(filepath.Join(getSubdir(keyAlias), name))
+	if err != nil {
+		return nil, "", err
+	}
+
+	// See if the key is encrypted. If it's encrypted we'll fail to parse the private key
+	privKey, err := utils.ParsePEMPrivateKey(keyBytes, "")
+	if err != nil {
+		privKey, _, err = GetPasswdDecryptBytes(s.PassRetriever, keyBytes, name, string(keyAlias))
+		if err != nil {
+			return nil, "", err
+		}
+	}
+	s.cachedKeys[name] = &cachedKey{alias: keyAlias, key: privKey}
+	return privKey, keyAlias, nil
+}
+
+// ListKeys returns a list of unique PublicKeys present in this key store, by returning a copy of the keyInfoMap
+func (s *GenericKeyStore) ListKeys() map[string]KeyInfo {
+	return copyKeyInfoMap(s.keyInfoMap)
+}
+
+// RemoveKey removes the key from the key store
+func (s *GenericKeyStore) RemoveKey(keyID string) error {
+	s.Lock()
+	defer s.Unlock()
+	// If this is a bare key ID without the gun, prepend the gun so the filestore lookup succeeds
+	if keyInfo, ok := s.keyInfoMap[keyID]; ok {
+		keyID = filepath.Join(keyInfo.Gun, keyID)
+	}
+
+	role, legacy, err := getKeyRole(s.store, keyID)
+	if err != nil {
+		return err
+	}
+
+	delete(s.cachedKeys, keyID)
+
+	name := keyID
+	if legacy {
+		name = keyID + "_" + role
+	}
+
+	// being in a subdirectory is for backwards compatibility
+	err = s.store.Remove(filepath.Join(getSubdir(role), name))
+	if err != nil {
+		return err
+	}
+
+	// Remove this key from our keyInfo map if we removed from our filesystem
+	delete(s.keyInfoMap, filepath.Base(keyID))
+	return nil
+}
+
+// Name returns a user friendly name for the location this store
+// keeps its data
+func (s *GenericKeyStore) Name() string {
+	return s.store.Location()
+}
+
+// copyKeyInfoMap returns a deep copy of the passed-in keyInfoMap
+func 
copyKeyInfoMap(keyInfoMap map[string]KeyInfo) map[string]KeyInfo { + copyMap := make(map[string]KeyInfo) + for keyID, keyInfo := range keyInfoMap { + copyMap[keyID] = KeyInfo{Role: keyInfo.Role, Gun: keyInfo.Gun} + } + return copyMap +} + +// KeyInfoFromPEM attempts to get a keyID and KeyInfo from the filename and PEM bytes of a key +func KeyInfoFromPEM(pemBytes []byte, filename string) (string, KeyInfo, error) { + keyID, role, gun := inferKeyInfoFromKeyPath(filename) + if role == "" { + block, _ := pem.Decode(pemBytes) + if block == nil { + return "", KeyInfo{}, fmt.Errorf("could not decode PEM block for key %s", filename) + } + if keyRole, ok := block.Headers["role"]; ok { + role = keyRole + } + } + return keyID, KeyInfo{Gun: gun, Role: role}, nil +} + +// getKeyRole finds the role for the given keyID. It attempts to look +// both in the newer format PEM headers, and also in the legacy filename +// format. It returns: the role, whether it was found in the legacy format +// (true == legacy), and an error +func getKeyRole(s Storage, keyID string) (string, bool, error) { + name := strings.TrimSpace(strings.TrimSuffix(filepath.Base(keyID), filepath.Ext(keyID))) + + for _, file := range s.ListFiles() { + filename := filepath.Base(file) + + if strings.HasPrefix(filename, name) { + d, err := s.Get(file) + if err != nil { + return "", false, err + } + block, _ := pem.Decode(d) + if block != nil { + if role, ok := block.Headers["role"]; ok { + return role, false, nil + } + } + + role := strings.TrimPrefix(filename, name+"_") + return role, true, nil + } + } + + return "", false, ErrKeyNotFound{KeyID: keyID} +} + +// Assumes 2 subdirectories, 1 containing root keys and 1 containing TUF keys +func getSubdir(alias string) string { + if alias == data.CanonicalRootRole { + return notary.RootKeysSubdir + } + return notary.NonRootKeysSubdir +} + +// GetPasswdDecryptBytes gets the password to decrypt the given pem bytes. 
+// Returns the password and private key +func GetPasswdDecryptBytes(passphraseRetriever notary.PassRetriever, pemBytes []byte, name, alias string) (data.PrivateKey, string, error) { + var ( + passwd string + retErr error + privKey data.PrivateKey + ) + for attempts := 0; ; attempts++ { + var ( + giveup bool + err error + ) + passwd, giveup, err = passphraseRetriever(name, alias, false, attempts) + // Check if the passphrase retriever got an error or if it is telling us to give up + if giveup || err != nil { + return nil, "", ErrPasswordInvalid{} + } + if attempts > 10 { + return nil, "", ErrAttemptsExceeded{} + } -// KeyStore is a generic interface for private key storage -type KeyStore interface { - // AddKey adds a key to the KeyStore, and if the key already exists, - // succeeds. Otherwise, returns an error if it cannot add. - AddKey(keyInfo KeyInfo, privKey data.PrivateKey) error - // Should fail with ErrKeyNotFound if the keystore is operating normally - // and knows that it does not store the requested key. - GetKey(keyID string) (data.PrivateKey, string, error) - GetKeyInfo(keyID string) (KeyInfo, error) - ListKeys() map[string]KeyInfo - RemoveKey(keyID string) error - Name() string -} - -type cachedKey struct { - alias string - key data.PrivateKey + // Try to convert PEM encoded bytes back to a PrivateKey using the passphrase + privKey, err = utils.ParsePEMPrivateKey(pemBytes, passwd) + if err != nil { + retErr = ErrPasswordInvalid{} + } else { + // We managed to parse the PrivateKey. We've succeeded! 
+ retErr = nil + break + } + } + if retErr != nil { + return nil, "", retErr + } + return privKey, passwd, nil } diff --git a/trustmanager/keyfilestore_test.go b/trustmanager/keystore_test.go similarity index 97% rename from trustmanager/keyfilestore_test.go rename to trustmanager/keystore_test.go index 82e4f6412..12ecb2297 100644 --- a/trustmanager/keyfilestore_test.go +++ b/trustmanager/keystore_test.go @@ -11,6 +11,7 @@ import ( "github.com/docker/notary" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" "github.com/stretchr/testify/require" ) @@ -45,7 +46,7 @@ func testAddKeyWithRole(t *testing.T, role, expectedSubdir string) { store, err := NewKeyFileStore(tempBaseDir, passphraseRetriever) require.NoError(t, err, "failed to create new key filestore") - privKey, err := GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate private key") // Since we're generating this manually we need to add the extension '.' 
@@ -85,11 +86,11 @@ func TestKeyStoreInternalState(t *testing.T) { roleToID := make(map[string]string) for _, role := range roles { // generate a key for the role - privKey, err := GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate private key") // generate the correct PEM role header - privKeyPEM, err := KeyToPEM(privKey, role) + privKeyPEM, err := utils.KeyToPEM(privKey, role) require.NoError(t, err, "could not generate PEM") // write the key file to the correct location @@ -149,7 +150,7 @@ func TestKeyStoreInternalState(t *testing.T) { require.False(t, ok) // Generate a new targets key and add it with its gun, check that the map gets updated back - privKey, err := GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate private key") require.NoError(t, store.AddKey(KeyInfo{Role: data.CanonicalTargetsRole, Gun: gun}, privKey)) require.Equal(t, gun, store.keyInfoMap[privKey.ID()].Gun) @@ -245,7 +246,7 @@ EMl3eFOJXjIch/wIesRSN+2dGOsl7neercjMh1i9RvpCwHDx/E0= if success { require.NoError(t, err, "failed to get %s key from store (it's in %s)", role, expectedSubdir) - pemPrivKey, err := KeyToPEM(privKey, role) + pemPrivKey, err := utils.KeyToPEM(privKey, role) require.NoError(t, err, "failed to convert key to PEM") require.Equal(t, testData, pemPrivKey) @@ -336,7 +337,7 @@ func TestListKeys(t *testing.T) { for i, role := range roles { // Make a new key for each role - privKey, err := GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate private key") // Call the AddKey function @@ -379,7 +380,7 @@ func TestAddGetKeyMemStore(t *testing.T) { // Create our store store := NewKeyMemoryStore(passphraseRetriever) - privKey, err := GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate 
private key") // Call the AddKey function @@ -401,7 +402,7 @@ func TestAddGetKeyInfoMemStore(t *testing.T) { // Create our store store := NewKeyMemoryStore(passphraseRetriever) - rootKey, err := GenerateECDSAKey(rand.Reader) + rootKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate private key") // Call the AddKey function @@ -414,7 +415,7 @@ func TestAddGetKeyInfoMemStore(t *testing.T) { require.Equal(t, data.CanonicalRootRole, rootInfo.Role) require.Equal(t, "", rootInfo.Gun) - targetsKey, err := GenerateECDSAKey(rand.Reader) + targetsKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate private key") // Call the AddKey function @@ -427,7 +428,7 @@ func TestAddGetKeyInfoMemStore(t *testing.T) { require.Equal(t, data.CanonicalTargetsRole, targetsInfo.Role) require.Equal(t, gun, targetsInfo.Gun) - delgKey, err := GenerateECDSAKey(rand.Reader) + delgKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate private key") // Call the AddKey function @@ -455,7 +456,7 @@ func TestGetDecryptedWithTamperedCipherText(t *testing.T) { require.NoError(t, err, "failed to create new key filestore") // Generate a new Private Key - privKey, err := GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate private key") // Call the AddEncryptedKey function @@ -546,7 +547,7 @@ func testGetDecryptedWithInvalidPassphrase(t *testing.T, store KeyStore, newStor testAlias := data.CanonicalRootRole // Generate a new random RSA Key - privKey, err := GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate private key") // Call the AddKey function @@ -581,7 +582,7 @@ func testRemoveKeyWithRole(t *testing.T, role, expectedSubdir string) { store, err := NewKeyFileStore(tempBaseDir, passphraseRetriever) require.NoError(t, err, "failed to create 
new key filestore") - privKey, err := GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate private key") // Since we're generating this manually we need to add the extension '.' @@ -624,7 +625,7 @@ func TestKeysAreCached(t *testing.T) { store, err := NewKeyFileStore(tempBaseDir, countingPassphraseRetriever) require.NoError(t, err, "failed to create new key filestore") - privKey, err := GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err, "could not generate private key") // Call the AddKey function diff --git a/trustmanager/memorystore.go b/trustmanager/memorystore.go deleted file mode 100644 index d93e6c72c..000000000 --- a/trustmanager/memorystore.go +++ /dev/null @@ -1,54 +0,0 @@ -package trustmanager - -import "os" - -// MemoryFileStore is an implementation of Storage that keeps the contents in -// memory. It is not thread-safe and should be used by a higher-level interface -// that provides locking. 
-type MemoryFileStore struct { - files map[string][]byte -} - -// NewMemoryFileStore creates a MemoryFileStore -func NewMemoryFileStore() *MemoryFileStore { - return &MemoryFileStore{ - files: make(map[string][]byte), - } -} - -// Add writes data to a file with a given name -func (f *MemoryFileStore) Add(name string, data []byte) error { - f.files[name] = data - return nil -} - -// Remove removes a file identified by name -func (f *MemoryFileStore) Remove(name string) error { - if _, present := f.files[name]; !present { - return os.ErrNotExist - } - delete(f.files, name) - - return nil -} - -// Get returns the data given a file name -func (f *MemoryFileStore) Get(name string) ([]byte, error) { - fileData, present := f.files[name] - if !present { - return nil, os.ErrNotExist - } - - return fileData, nil -} - -// ListFiles lists all the files inside of a store -func (f *MemoryFileStore) ListFiles() []string { - var list []string - - for name := range f.files { - list = append(list, name) - } - - return list -} diff --git a/trustmanager/store.go b/trustmanager/store.go deleted file mode 100644 index c3b23469b..000000000 --- a/trustmanager/store.go +++ /dev/null @@ -1,42 +0,0 @@ -package trustmanager - -import ( - "errors" - - "github.com/docker/notary" -) - -const ( - visible = notary.PubCertPerms - private = notary.PrivKeyPerms -) - -var ( - // ErrPathOutsideStore indicates that the returned path would be - // outside the store - ErrPathOutsideStore = errors.New("path outside file store") -) - -// Storage implements the bare bones primitives (no hierarchy) -type Storage interface { - // Add writes a file to the specified location, returning an error if this - // is not possible (reasons may include permissions errors). The path is cleaned - // before being made absolute against the store's base dir. - Add(fileName string, data []byte) error - - // Remove deletes a file from the store relative to the store's base directory. 
- // The path is cleaned before being made absolute to ensure no path traversal - // outside the base directory is possible. - Remove(fileName string) error - - // Get returns the file content found at fileName relative to the base directory - // of the file store. The path is cleaned before being made absolute to ensure - // path traversal outside the store is not possible. If the file is not found - // an error to that effect is returned. - Get(fileName string) ([]byte, error) - - // ListFiles returns a list of paths relative to the base directory of the - // filestore. Any of these paths must be retrievable via the - // Storage.Get method. - ListFiles() []string -} diff --git a/trustmanager/yubikey/yubikeystore.go b/trustmanager/yubikey/yubikeystore.go index 86b4e718a..ed13f0411 100644 --- a/trustmanager/yubikey/yubikeystore.go +++ b/trustmanager/yubikey/yubikeystore.go @@ -21,6 +21,7 @@ import ( "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" + "github.com/docker/notary/tuf/utils" "github.com/miekg/pkcs11" ) @@ -249,7 +250,7 @@ func addECDSAKey( // Hard-coded policy: the generated certificate expires in 10 years. 
startTime := time.Now() - template, err := trustmanager.NewCertificate(role, startTime, startTime.AddDate(10, 0, 0)) + template, err := utils.NewCertificate(role, startTime, startTime.AddDate(10, 0, 0)) if err != nil { return fmt.Errorf("failed to create the certificate template: %v", err) } diff --git a/trustmanager/yubikey/yubikeystore_test.go b/trustmanager/yubikey/yubikeystore_test.go index e9b120a0a..c3d1a691c 100644 --- a/trustmanager/yubikey/yubikeystore_test.go +++ b/trustmanager/yubikey/yubikeystore_test.go @@ -12,6 +12,7 @@ import ( "github.com/docker/notary/passphrase" "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" "github.com/miekg/pkcs11" "github.com/stretchr/testify/require" ) @@ -57,7 +58,7 @@ func TestEnsurePrivateKeySizePadsLessThanRequiredSizeArrays(t *testing.T) { } func testAddKey(t *testing.T, store trustmanager.KeyStore) (data.PrivateKey, error) { - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) err = store.AddKey(trustmanager.KeyInfo{Role: data.CanonicalRootRole, Gun: ""}, privKey) @@ -250,7 +251,7 @@ func TestYubiAddKeyCanAddToMiddleSlot(t *testing.T) { } type nonworkingBackup struct { - trustmanager.KeyMemoryStore + trustmanager.GenericKeyStore } // AddKey stores the contents of a PEM-encoded private key as a PEM block @@ -273,7 +274,7 @@ func TestYubiAddKeyRollsBackIfCannotBackup(t *testing.T) { }() backup := &nonworkingBackup{ - KeyMemoryStore: *trustmanager.NewKeyMemoryStore(ret), + GenericKeyStore: *trustmanager.NewKeyMemoryStore(ret), } store, err := NewYubiStore(backup, ret) require.NoError(t, err) diff --git a/trustpinning/certs.go b/trustpinning/certs.go index 0e584b743..05487d575 100644 --- a/trustpinning/certs.go +++ b/trustpinning/certs.go @@ -8,9 +8,9 @@ import ( "time" "github.com/Sirupsen/logrus" - "github.com/docker/notary/trustmanager" 
"github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" + "github.com/docker/notary/tuf/utils" ) // ErrValidationFail is returned when there is no valid trusted certificates @@ -123,7 +123,7 @@ func ValidateRoot(prevRoot *data.SignedRoot, root *data.Signed, gun string, trus } err = signed.VerifySignatures( - root, data.BaseRole{Keys: trustmanager.CertsToKeys(trustedLeafCerts, allTrustedIntCerts), Threshold: prevRootRoleData.Threshold}) + root, data.BaseRole{Keys: utils.CertsToKeys(trustedLeafCerts, allTrustedIntCerts), Threshold: prevRootRoleData.Threshold}) if err != nil { logrus.Debugf("failed to verify TUF data for: %s, %v", gun, err) return nil, &ErrRootRotationFail{Reason: "failed to validate data with current trusted certificates"} @@ -152,7 +152,7 @@ func ValidateRoot(prevRoot *data.SignedRoot, root *data.Signed, gun string, trus // Note that certsFromRoot is guaranteed to be unchanged only if we had prior cert data for this GUN or enabled TOFUS // If we attempted to pin a certain certificate or CA, certsFromRoot could have been pruned accordingly err = signed.VerifySignatures(root, data.BaseRole{ - Keys: trustmanager.CertsToKeys(certsFromRoot, allIntCerts), Threshold: rootRole.Threshold}) + Keys: utils.CertsToKeys(certsFromRoot, allIntCerts), Threshold: rootRole.Threshold}) if err != nil { logrus.Debugf("failed to verify TUF data for: %s, %v", gun, err) return nil, &ErrValidationFail{Reason: "failed to validate integrity of roots"} @@ -233,14 +233,14 @@ func parseAllCerts(signedRoot *data.SignedRoot) (map[string]*x509.Certificate, m // Decode all the x509 certificates that were bundled with this // Specific root key - decodedCerts, err := trustmanager.LoadCertBundleFromPEM(key.Public()) + decodedCerts, err := utils.LoadCertBundleFromPEM(key.Public()) if err != nil { logrus.Debugf("error while parsing root certificate with keyID: %s, %v", keyID, err) continue } // Get all non-CA certificates in the decoded certificates - 
leafCertList := trustmanager.GetLeafCerts(decodedCerts) + leafCertList := utils.GetLeafCerts(decodedCerts) // If we got no leaf certificates or we got more than one, fail if len(leafCertList) != 1 { @@ -260,7 +260,7 @@ func parseAllCerts(signedRoot *data.SignedRoot) (map[string]*x509.Certificate, m leafCerts[key.ID()] = leafCert // Get all the remainder certificates marked as a CA to be used as intermediates - intermediateCerts := trustmanager.GetIntermediateCerts(decodedCerts) + intermediateCerts := utils.GetIntermediateCerts(decodedCerts) intCerts[key.ID()] = intermediateCerts } diff --git a/trustpinning/certs_test.go b/trustpinning/certs_test.go index 4f26936d1..a037723c4 100644 --- a/trustpinning/certs_test.go +++ b/trustpinning/certs_test.go @@ -24,6 +24,7 @@ import ( "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" "github.com/docker/notary/tuf/testutils" + "github.com/docker/notary/tuf/utils" "github.com/stretchr/testify/require" ) @@ -444,13 +445,13 @@ func TestValidateRootWithPinnedCA(t *testing.T) { require.Error(t, err) // Now construct a new root with a valid cert chain, such that signatures are correct over the 'notary-signer' GUN. 
Pin the root-ca and validate - leafCert, err := trustmanager.LoadCertFromFile("../fixtures/notary-signer.crt") + leafCert, err := utils.LoadCertFromFile("../fixtures/notary-signer.crt") require.NoError(t, err) - intermediateCert, err := trustmanager.LoadCertFromFile("../fixtures/intermediate-ca.crt") + intermediateCert, err := utils.LoadCertFromFile("../fixtures/intermediate-ca.crt") require.NoError(t, err) - pemChainBytes, err := trustmanager.CertChainToPEM([]*x509.Certificate{leafCert, intermediateCert}) + pemChainBytes, err := utils.CertChainToPEM([]*x509.Certificate{leafCert, intermediateCert}) require.NoError(t, err) newRootKey := data.NewPublicKey(data.RSAx509Key, pemChainBytes) @@ -474,7 +475,7 @@ func TestValidateRootWithPinnedCA(t *testing.T) { require.NoError(t, err, "could not open key file") pemBytes, err := ioutil.ReadAll(keyReader) require.NoError(t, err, "could not read key file") - privKey, err := trustmanager.ParsePEMPrivateKey(pemBytes, "") + privKey, err := utils.ParsePEMPrivateKey(pemBytes, "") require.NoError(t, err) store, err := trustmanager.NewKeyFileStore(tempBaseDir, passphraseRetriever) @@ -500,7 +501,7 @@ func TestValidateRootWithPinnedCA(t *testing.T) { require.Equal(t, newTypedSignedRoot, validatedRoot) // Add an expired CA for the same gun to our previous pinned bundle, ensure that we still validate correctly - goodRootCABundle, err := trustmanager.LoadCertBundleFromFile(validCAFilepath) + goodRootCABundle, err := utils.LoadCertBundleFromFile(validCAFilepath) require.NoError(t, err) memKeyStore := trustmanager.NewKeyMemoryStore(passphraseRetriever) cryptoService := cryptoservice.NewCryptoService(memKeyStore) @@ -510,7 +511,7 @@ func TestValidateRootWithPinnedCA(t *testing.T) { require.NoError(t, err) expiredCert, err := generateExpiredTestingCertificate(testPrivKey, "notary-signer") require.NoError(t, err) - bundleWithExpiredCert, err := trustmanager.CertChainToPEM(append(goodRootCABundle, expiredCert)) + bundleWithExpiredCert, err := 
utils.CertChainToPEM(append(goodRootCABundle, expiredCert)) require.NoError(t, err) bundleWithExpiredCertPath := filepath.Join(tempBaseDir, "bundle_with_expired_cert.pem") require.NoError(t, ioutil.WriteFile(bundleWithExpiredCertPath, bundleWithExpiredCert, 0644)) @@ -526,7 +527,7 @@ func TestValidateRootWithPinnedCA(t *testing.T) { require.NoError(t, err) expiredCert2, err := generateExpiredTestingCertificate(testPrivKey2, "notary-signer") require.NoError(t, err) - allExpiredCertBundle, err := trustmanager.CertChainToPEM([]*x509.Certificate{expiredCert, expiredCert2}) + allExpiredCertBundle, err := utils.CertChainToPEM([]*x509.Certificate{expiredCert, expiredCert2}) require.NoError(t, err) allExpiredCertPath := filepath.Join(tempBaseDir, "all_expired_cert.pem") require.NoError(t, ioutil.WriteFile(allExpiredCertPath, allExpiredCertBundle, 0644)) @@ -541,7 +542,7 @@ func TestValidateRootWithPinnedCA(t *testing.T) { require.NoError(t, err) validCert, err := cryptoservice.GenerateCertificate(testPrivKey3, "notary-signer", time.Now(), time.Now().AddDate(1, 0, 0)) require.NoError(t, err) - bundleWithWrongCert, err := trustmanager.CertChainToPEM([]*x509.Certificate{validCert}) + bundleWithWrongCert, err := utils.CertChainToPEM([]*x509.Certificate{validCert}) require.NoError(t, err) bundleWithWrongCertPath := filepath.Join(tempBaseDir, "bundle_with_expired_cert.pem") require.NoError(t, ioutil.WriteFile(bundleWithWrongCertPath, bundleWithWrongCert, 0644)) diff --git a/trustpinning/trustpin.go b/trustpinning/trustpin.go index 351aed266..5ddfe14e5 100644 --- a/trustpinning/trustpin.go +++ b/trustpinning/trustpin.go @@ -4,7 +4,6 @@ import ( "crypto/x509" "fmt" "github.com/Sirupsen/logrus" - "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/utils" "strings" ) @@ -39,14 +38,14 @@ func NewTrustPinChecker(trustPinConfig TrustPinConfig, gun string) (CertChecker, if caFilepath, err := getPinnedCAFilepathByPrefix(gun, trustPinConfig); err == nil { // Try 
to add the CA certs from its bundle file to our certificate store, // and use it to validate certs in the root.json later - caCerts, err := trustmanager.LoadCertBundleFromFile(caFilepath) + caCerts, err := utils.LoadCertBundleFromFile(caFilepath) if err != nil { return nil, fmt.Errorf("could not load root cert from CA path") } // Now only consider certificates that are direct children from this CA cert chain caRootPool := x509.NewCertPool() for _, caCert := range caCerts { - if err = trustmanager.ValidateCertificate(caCert); err != nil { + if err = utils.ValidateCertificate(caCert); err != nil { continue } caRootPool.AddCert(caCert) @@ -68,7 +67,7 @@ func NewTrustPinChecker(trustPinConfig TrustPinConfig, gun string) (CertChecker, func (t trustPinChecker) certsCheck(leafCert *x509.Certificate, intCerts []*x509.Certificate) bool { // reconstruct the leaf + intermediate cert chain, which is bundled as {leaf, intermediates...}, // in order to get the matching id in the root file - key, err := trustmanager.CertBundleToKey(leafCert, intCerts) + key, err := utils.CertBundleToKey(leafCert, intCerts) if err != nil { logrus.Debug("error creating cert bundle: ", err.Error()) return false diff --git a/tuf/client/client.go b/tuf/client/client.go index 90ab698f4..4b7b4bc1a 100644 --- a/tuf/client/client.go +++ b/tuf/client/client.go @@ -5,9 +5,9 @@ import ( "github.com/Sirupsen/logrus" "github.com/docker/notary" - tuf "github.com/docker/notary/tuf" + store "github.com/docker/notary/storage" + "github.com/docker/notary/tuf" "github.com/docker/notary/tuf/data" - "github.com/docker/notary/tuf/store" ) // Client is a usability wrapper around a raw TUF repo @@ -88,7 +88,7 @@ func (c *Client) downloadRoot() error { logrus.Debugf("Loading root with no expected checksum") // get the cached root, if it exists, just for version checking - cachedRoot, _ := c.cache.GetMeta(role, -1) + cachedRoot, _ := c.cache.GetSized(role, -1) // prefer to download a new root _, 
remoteErr := c.tryLoadRemote(consistentInfo, cachedRoot) return remoteErr @@ -107,7 +107,7 @@ func (c *Client) downloadTimestamp() error { consistentInfo := c.newBuilder.GetConsistentInfo(role) // get the cached timestamp, if it exists - cachedTS, cachedErr := c.cache.GetMeta(role, notary.MaxTimestampSize) + cachedTS, cachedErr := c.cache.GetSized(role, notary.MaxTimestampSize) // always get the remote timestamp, since it supercedes the local one _, remoteErr := c.tryLoadRemote(consistentInfo, cachedTS) @@ -188,7 +188,7 @@ func (c Client) getTargetsFile(role data.DelegationRole, ci tuf.ConsistentInfo) } func (c *Client) tryLoadCacheThenRemote(consistentInfo tuf.ConsistentInfo) ([]byte, error) { - cachedTS, err := c.cache.GetMeta(consistentInfo.RoleName, consistentInfo.Length()) + cachedTS, err := c.cache.GetSized(consistentInfo.RoleName, consistentInfo.Length()) if err != nil { logrus.Debugf("no %s in cache, must download", consistentInfo.RoleName) return c.tryLoadRemote(consistentInfo, nil) @@ -205,7 +205,7 @@ func (c *Client) tryLoadCacheThenRemote(consistentInfo tuf.ConsistentInfo) ([]by func (c *Client) tryLoadRemote(consistentInfo tuf.ConsistentInfo, old []byte) ([]byte, error) { consistentName := consistentInfo.ConsistentName() - raw, err := c.remote.GetMeta(consistentName, consistentInfo.Length()) + raw, err := c.remote.GetSized(consistentName, consistentInfo.Length()) if err != nil { logrus.Debugf("error downloading %s: %s", consistentName, err) return old, err @@ -222,7 +222,7 @@ func (c *Client) tryLoadRemote(consistentInfo tuf.ConsistentInfo, old []byte) ([ return raw, err } logrus.Debugf("successfully verified downloaded %s", consistentName) - if err := c.cache.SetMeta(consistentInfo.RoleName, raw); err != nil { + if err := c.cache.Set(consistentInfo.RoleName, raw); err != nil { logrus.Debugf("Unable to write %s to cache: %s", consistentInfo.RoleName, err) } return raw, nil diff --git a/tuf/signed/ed25519.go b/tuf/signed/ed25519.go index 
bc884bdbe..eef673b9d 100644 --- a/tuf/signed/ed25519.go +++ b/tuf/signed/ed25519.go @@ -6,6 +6,7 @@ import ( "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" ) type edCryptoKey struct { @@ -72,7 +73,7 @@ func (e *Ed25519) Create(role, gun, algorithm string) (data.PublicKey, error) { return nil, errors.New("only ED25519 supported by this cryptoservice") } - private, err := trustmanager.GenerateED25519Key(rand.Reader) + private, err := utils.GenerateED25519Key(rand.Reader) if err != nil { return nil, err } diff --git a/tuf/signed/sign_test.go b/tuf/signed/sign_test.go index 022c5c40c..482092ad3 100644 --- a/tuf/signed/sign_test.go +++ b/tuf/signed/sign_test.go @@ -12,6 +12,7 @@ import ( "github.com/docker/notary/cryptoservice" "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" "github.com/stretchr/testify/require" ) @@ -208,14 +209,14 @@ func TestSignReturnsNoSigs(t *testing.T) { func TestSignWithX509(t *testing.T) { // generate a key becase we need a cert - privKey, err := trustmanager.GenerateRSAKey(rand.Reader, 1024) + privKey, err := utils.GenerateRSAKey(rand.Reader, 1024) require.NoError(t, err) // make a RSA x509 key cert, err := cryptoservice.GenerateCertificate(privKey, "test", time.Now(), time.Now().AddDate(10, 0, 0)) require.NoError(t, err) - tufRSAx509Key := trustmanager.CertToKey(cert) + tufRSAx509Key := utils.CertToKey(cert) require.NoError(t, err) // test signing against a service that only recognizes a RSAKey (not @@ -335,7 +336,7 @@ func TestSignMinSignatures(t *testing.T) { } func TestSignFailingKeys(t *testing.T) { - privKey, err := trustmanager.GenerateECDSAKey(rand.Reader) + privKey, err := utils.GenerateECDSAKey(rand.Reader) require.NoError(t, err) cs := &MockCryptoService{FailingPrivateKey{privKey}} diff --git a/tuf/signed/verifiers_test.go b/tuf/signed/verifiers_test.go index 
a9209cff5..8aadef4be 100644 --- a/tuf/signed/verifiers_test.go +++ b/tuf/signed/verifiers_test.go @@ -433,6 +433,39 @@ func TestRSAPyCryptoVerifierInvalidKeyType(t *testing.T) { require.IsType(t, ErrInvalidKeyType{}, err) } +func TestPyCryptoRSAPSSCompat(t *testing.T) { + pubPem := "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAnKuXZeefa2LmgxaL5NsM\nzKOHNe+x/nL6ik+lDBCTV6OdcwAhHQS+PONGhrChIUVR6Vth3hUCrreLzPO73Oo5\nVSCuRJ53UronENl6lsa5mFKP8StYLvIDITNvkoT3j52BJIjyNUK9UKY9As2TNqDf\nBEPIRp28ev/NViwGOEkBu2UAbwCIdnDXm8JQErCZA0Ydm7PKGgjLbFsFGrVzqXHK\n6pdzJXlhr9yap3UpgQ/iO9JtoEYB2EXsnSrPc9JRjR30bNHHtnVql3fvinXrAEwq\n3xmN4p+R4VGzfdQN+8Kl/IPjqWB535twhFYEG/B7Ze8IwbygBjK3co/KnOPqMUrM\nBI8ztvPiogz+MvXb8WvarZ6TMTh8ifZI96r7zzqyzjR1hJulEy3IsMGvz8XS2J0X\n7sXoaqszEtXdq5ef5zKVxkiyIQZcbPgmpHLq4MgfdryuVVc/RPASoRIXG4lKaTJj\n1ANMFPxDQpHudCLxwCzjCb+sVa20HBRPTnzo8LSZkI6jAgMBAAE=\n-----END PUBLIC KEY-----" + testStr := "The quick brown fox jumps over the lazy dog." + sigHex := "4e05ee9e435653549ac4eddbc43e1a6868636e8ea6dbec2564435afcb0de47e0824cddbd88776ddb20728c53ecc90b5d543d5c37575fda8bd0317025fc07de62ee8084b1a75203b1a23d1ef4ac285da3d1fc63317d5b2cf1aafa3e522acedd366ccd5fe4a7f02a42922237426ca3dc154c57408638b9bfaf0d0213855d4e9ee621db204151bcb13d4dbb18f930ec601469c992c84b14e9e0b6f91ac9517bb3b749dd117e1cbac2e4acb0e549f44558a2005898a226d5b6c8b9291d7abae0d9e0a16858b89662a085f74a202deb867acab792bdbd2c36731217caea8b17bd210c29b890472f11e5afdd1dd7b69004db070e04201778f2c49f5758643881403d45a58d08f51b5c63910c6185892f0b590f191d760b669eff2464456f130239bba94acf54a0cb98f6939ff84ae26a37f9b890be259d9b5d636f6eb367b53e895227d7d79a3a88afd6d28c198ee80f6527437c5fbf63accb81709925c4e03d1c9eaee86f58e4bd1c669d6af042dbd412de0d13b98b1111e2fadbe34b45de52125e9a" + k := data.NewPublicKey(data.RSAKey, []byte(pubPem)) + + sigBytes, err := hex.DecodeString(sigHex) + if err != nil { + t.Fatal(err) + } + v := RSAPyCryptoVerifier{} + err = v.Verify(k, sigBytes, []byte(testStr)) + if err != nil { + 
t.Fatal(err) + } +} + +func TestPyNaCled25519Compat(t *testing.T) { + pubHex := "846612b43cef909a0e4ea9c818379bca4723a2020619f95e7a0ccc6f0850b7dc" + testStr := "The quick brown fox jumps over the lazy dog." + sigHex := "166e7013e48f26dccb4e68fe4cf558d1cd3af902f8395534336a7f8b4c56588694aa3ac671767246298a59d5ef4224f02c854f41bfcfe70241db4be1546d6a00" + + pub, _ := hex.DecodeString(pubHex) + k := data.NewPublicKey(data.ED25519Key, pub) + + sigBytes, _ := hex.DecodeString(sigHex) + + err := Verifiers[data.EDDSASignature].Verify(k, sigBytes, []byte(testStr)) + if err != nil { + t.Fatal(err) + } +} + func rsaPSSSign(privKey data.PrivateKey, hash crypto.Hash, hashed []byte) ([]byte, error) { if privKey, ok := privKey.(*data.RSAPrivateKey); !ok { return nil, fmt.Errorf("private key type not supported: %s", privKey.Algorithm()) diff --git a/tuf/store/filestore.go b/tuf/store/filestore.go deleted file mode 100644 index 401e7ee42..000000000 --- a/tuf/store/filestore.go +++ /dev/null @@ -1,102 +0,0 @@ -package store - -import ( - "fmt" - "github.com/docker/notary" - "io/ioutil" - "os" - "path" - "path/filepath" -) - -// NewFilesystemStore creates a new store in a directory tree -func NewFilesystemStore(baseDir, metaSubDir, metaExtension string) (*FilesystemStore, error) { - metaDir := path.Join(baseDir, metaSubDir) - - // Make sure we can create the necessary dirs and they are writable - err := os.MkdirAll(metaDir, 0700) - if err != nil { - return nil, err - } - - return &FilesystemStore{ - baseDir: baseDir, - metaDir: metaDir, - metaExtension: metaExtension, - }, nil -} - -// FilesystemStore is a store in a locally accessible directory -type FilesystemStore struct { - baseDir string - metaDir string - metaExtension string -} - -func (f *FilesystemStore) getPath(name string) string { - fileName := fmt.Sprintf("%s.%s", name, f.metaExtension) - return filepath.Join(f.metaDir, fileName) -} - -// GetMeta returns the meta for the given name (a role) up to size bytes -// If size 
is "NoSizeLimit", this corresponds to "infinite," but we cut off at a -// predefined threshold "notary.MaxDownloadSize". -func (f *FilesystemStore) GetMeta(name string, size int64) ([]byte, error) { - meta, err := ioutil.ReadFile(f.getPath(name)) - if err != nil { - if os.IsNotExist(err) { - err = ErrMetaNotFound{Resource: name} - } - return nil, err - } - if size == NoSizeLimit { - size = notary.MaxDownloadSize - } - // Only return up to size bytes - if int64(len(meta)) < size { - return meta, nil - } - return meta[:size], nil -} - -// SetMultiMeta sets the metadata for multiple roles in one operation -func (f *FilesystemStore) SetMultiMeta(metas map[string][]byte) error { - for role, blob := range metas { - err := f.SetMeta(role, blob) - if err != nil { - return err - } - } - return nil -} - -// SetMeta sets the meta for a single role -func (f *FilesystemStore) SetMeta(name string, meta []byte) error { - fp := f.getPath(name) - - // Ensures the parent directories of the file we are about to write exist - err := os.MkdirAll(filepath.Dir(fp), 0700) - if err != nil { - return err - } - - // if something already exists, just delete it and re-write it - os.RemoveAll(fp) - - // Write the file to disk - if err = ioutil.WriteFile(fp, meta, 0600); err != nil { - return err - } - return nil -} - -// RemoveAll clears the existing filestore by removing its base directory -func (f *FilesystemStore) RemoveAll() error { - return os.RemoveAll(f.baseDir) -} - -// RemoveMeta removes the metadata for a single role - if the metadata doesn't -// exist, no error is returned -func (f *FilesystemStore) RemoveMeta(name string) error { - return os.RemoveAll(f.getPath(name)) // RemoveAll succeeds if path doesn't exist -} diff --git a/tuf/store/filestore_test.go b/tuf/store/filestore_test.go deleted file mode 100644 index ebdd1944c..000000000 --- a/tuf/store/filestore_test.go +++ /dev/null @@ -1,138 +0,0 @@ -package store - -import ( - "io/ioutil" - "os" - "path" - "path/filepath" - 
"testing" - - "github.com/stretchr/testify/require" -) - -const testDir = "/tmp/testFilesystemStore/" - -func TestNewFilesystemStore(t *testing.T) { - _, err := NewFilesystemStore(testDir, "metadata", "json") - require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) - defer os.RemoveAll(testDir) - - info, err := os.Stat(path.Join(testDir, "metadata")) - require.Nil(t, err, "Error attempting to stat metadata dir: %v", err) - require.NotNil(t, info, "Nil FileInfo from stat on metadata dir") - require.True(t, 0700&info.Mode() != 0, "Metadata directory is not writable") -} - -func TestSetMeta(t *testing.T) { - s, err := NewFilesystemStore(testDir, "metadata", "json") - require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) - defer os.RemoveAll(testDir) - - testContent := []byte("test data") - - err = s.SetMeta("testMeta", testContent) - require.Nil(t, err, "SetMeta returned unexpected error: %v", err) - - content, err := ioutil.ReadFile(path.Join(testDir, "metadata", "testMeta.json")) - require.Nil(t, err, "Error reading file: %v", err) - require.Equal(t, testContent, content, "Content written to file was corrupted.") -} - -func TestSetMetaWithNoParentDirectory(t *testing.T) { - s, err := NewFilesystemStore(testDir, "metadata", "json") - require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) - defer os.RemoveAll(testDir) - - testContent := []byte("test data") - - err = s.SetMeta("noexist/"+"testMeta", testContent) - require.Nil(t, err, "SetMeta returned unexpected error: %v", err) - - content, err := ioutil.ReadFile(path.Join(testDir, "metadata", "noexist/testMeta.json")) - require.Nil(t, err, "Error reading file: %v", err) - require.Equal(t, testContent, content, "Content written to file was corrupted.") -} - -// if something already existed there, remove it first and write a new file -func TestSetMetaRemovesExistingFileBeforeWriting(t *testing.T) { - s, err := 
NewFilesystemStore(testDir, "metadata", "json") - require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) - defer os.RemoveAll(testDir) - - // make a directory where we want metadata to go - os.Mkdir(filepath.Join(testDir, "metadata", "root.json"), 0700) - - testContent := []byte("test data") - err = s.SetMeta("root", testContent) - require.NoError(t, err, "SetMeta returned unexpected error: %v", err) - - content, err := ioutil.ReadFile(path.Join(testDir, "metadata", "root.json")) - require.NoError(t, err, "Error reading file: %v", err) - require.Equal(t, testContent, content, "Content written to file was corrupted.") -} - -func TestGetMeta(t *testing.T) { - s, err := NewFilesystemStore(testDir, "metadata", "json") - require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) - defer os.RemoveAll(testDir) - - testContent := []byte("test data") - - ioutil.WriteFile(path.Join(testDir, "metadata", "testMeta.json"), testContent, 0600) - - content, err := s.GetMeta("testMeta", int64(len(testContent))) - require.Nil(t, err, "GetMeta returned unexpected error: %v", err) - - require.Equal(t, testContent, content, "Content read from file was corrupted.") - - // Check that NoSizeLimit size reads everything - content, err = s.GetMeta("testMeta", NoSizeLimit) - require.Nil(t, err, "GetMeta returned unexpected error: %v", err) - - require.Equal(t, testContent, content, "Content read from file was corrupted.") - - // Check that we return only up to size bytes - content, err = s.GetMeta("testMeta", 4) - require.Nil(t, err, "GetMeta returned unexpected error: %v", err) - - require.Equal(t, []byte("test"), content, "Content read from file was corrupted.") -} - -func TestGetSetMetadata(t *testing.T) { - s, err := NewFilesystemStore(testDir, "metadata", "json") - require.NoError(t, err, "Initializing FilesystemStore returned unexpected error", err) - defer os.RemoveAll(testDir) - - testGetSetMeta(t, func() MetadataStore { 
return s }) -} - -func TestRemoveMetadata(t *testing.T) { - s, err := NewFilesystemStore(testDir, "metadata", "json") - require.NoError(t, err, "Initializing FilesystemStore returned unexpected error", err) - defer os.RemoveAll(testDir) - - testRemoveMeta(t, func() MetadataStore { return s }) -} - -func TestRemoveAll(t *testing.T) { - s, err := NewFilesystemStore(testDir, "metadata", "json") - require.Nil(t, err, "Initializing FilesystemStore returned unexpected error: %v", err) - defer os.RemoveAll(testDir) - - testContent := []byte("test data") - - // Write some files in metadata and targets dirs - metaPath := path.Join(testDir, "metadata", "testMeta.json") - ioutil.WriteFile(metaPath, testContent, 0600) - - // Remove all - err = s.RemoveAll() - require.Nil(t, err, "Removing all from FilesystemStore returned unexpected error: %v", err) - - // Test that files no longer exist - _, err = ioutil.ReadFile(metaPath) - require.True(t, os.IsNotExist(err)) - - // Removing the empty filestore returns nil - require.Nil(t, s.RemoveAll()) -} diff --git a/tuf/testutils/corrupt_memorystore.go b/tuf/testutils/corrupt_memorystore.go index ee5afa39d..857d5d7e6 100644 --- a/tuf/testutils/corrupt_memorystore.go +++ b/tuf/testutils/corrupt_memorystore.go @@ -1,7 +1,7 @@ package testutils import ( - "github.com/docker/notary/tuf/store" + store "github.com/docker/notary/storage" ) // CorruptingMemoryStore corrupts all data returned by GetMeta @@ -16,10 +16,10 @@ func NewCorruptingMemoryStore(meta map[string][]byte) *CorruptingMemoryStore { return &CorruptingMemoryStore{MemoryStore: *s} } -// GetMeta returns up to size bytes of meta identified by string. It will +// GetSized returns up to size bytes of meta identified by string. 
It will // always be corrupted by setting the first character to } -func (cm CorruptingMemoryStore) GetMeta(name string, size int64) ([]byte, error) { - d, err := cm.MemoryStore.GetMeta(name, size) +func (cm CorruptingMemoryStore) GetSized(name string, size int64) ([]byte, error) { + d, err := cm.MemoryStore.GetSized(name, size) if err != nil { return nil, err } @@ -39,9 +39,9 @@ func NewLongMemoryStore(meta map[string][]byte) *LongMemoryStore { return &LongMemoryStore{MemoryStore: *s} } -// GetMeta returns one byte too much -func (lm LongMemoryStore) GetMeta(name string, size int64) ([]byte, error) { - d, err := lm.MemoryStore.GetMeta(name, size) +// GetSized returns one byte too much +func (lm LongMemoryStore) GetSized(name string, size int64) ([]byte, error) { + d, err := lm.MemoryStore.GetSized(name, size) if err != nil { return nil, err } @@ -61,9 +61,9 @@ func NewShortMemoryStore(meta map[string][]byte) *ShortMemoryStore { return &ShortMemoryStore{MemoryStore: *s} } -// GetMeta returns one byte too few -func (sm ShortMemoryStore) GetMeta(name string, size int64) ([]byte, error) { - d, err := sm.MemoryStore.GetMeta(name, size) +// GetSized returns one byte too few +func (sm ShortMemoryStore) GetSized(name string, size int64) ([]byte, error) { + d, err := sm.MemoryStore.GetSized(name, size) if err != nil { return nil, err } diff --git a/tuf/testutils/interfaces/cryptoservice.go b/tuf/testutils/interfaces/cryptoservice.go index 3c26228b5..d1c31dda1 100644 --- a/tuf/testutils/interfaces/cryptoservice.go +++ b/tuf/testutils/interfaces/cryptoservice.go @@ -4,9 +4,9 @@ import ( "crypto/rand" "testing" - "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" + "github.com/docker/notary/tuf/utils" "github.com/stretchr/testify/require" ) @@ -91,11 +91,11 @@ func AddGetKeyCryptoServiceInterfaceBehaviorTests(t *testing.T, cs signed.Crypto role := data.BaseRoles[i+1] switch algo { case data.RSAKey: 
- addedPrivKey, err = trustmanager.GenerateRSAKey(rand.Reader, 2048) + addedPrivKey, err = utils.GenerateRSAKey(rand.Reader, 2048) case data.ECDSAKey: - addedPrivKey, err = trustmanager.GenerateECDSAKey(rand.Reader) + addedPrivKey, err = utils.GenerateECDSAKey(rand.Reader) case data.ED25519Key: - addedPrivKey, err = trustmanager.GenerateED25519Key(rand.Reader) + addedPrivKey, err = utils.GenerateED25519Key(rand.Reader) default: require.FailNow(t, "invalid algorithm %s", algo) } @@ -123,11 +123,11 @@ func AddListKeyCryptoServiceInterfaceBehaviorTests(t *testing.T, cs signed.Crypt role := data.BaseRoles[i+1] switch algo { case data.RSAKey: - addedPrivKey, err = trustmanager.GenerateRSAKey(rand.Reader, 2048) + addedPrivKey, err = utils.GenerateRSAKey(rand.Reader, 2048) case data.ECDSAKey: - addedPrivKey, err = trustmanager.GenerateECDSAKey(rand.Reader) + addedPrivKey, err = utils.GenerateECDSAKey(rand.Reader) case data.ED25519Key: - addedPrivKey, err = trustmanager.GenerateED25519Key(rand.Reader) + addedPrivKey, err = utils.GenerateED25519Key(rand.Reader) default: require.FailNow(t, "invalid algorithm %s", algo) } diff --git a/tuf/testutils/repo.go b/tuf/testutils/repo.go index 96c8de8cc..17caa755c 100644 --- a/tuf/testutils/repo.go +++ b/tuf/testutils/repo.go @@ -12,6 +12,7 @@ import ( "github.com/docker/notary/passphrase" "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" + "github.com/docker/notary/tuf/utils" "github.com/stretchr/testify/require" tuf "github.com/docker/notary/tuf" @@ -40,9 +41,9 @@ func CreateKey(cs signed.CryptoService, gun, role, keyAlgorithm string) (data.Pu // Keep the x509 key type consistent with the key's algorithm switch keyAlgorithm { case data.RSAKey: - key = data.NewRSAx509PublicKey(trustmanager.CertToPEM(cert)) + key = data.NewRSAx509PublicKey(utils.CertToPEM(cert)) case data.ECDSAKey: - key = data.NewECDSAx509PublicKey(trustmanager.CertToPEM(cert)) + key = 
data.NewECDSAx509PublicKey(utils.CertToPEM(cert)) default: // This should be impossible because of the Create() call above, but just in case return nil, fmt.Errorf("invalid key algorithm type") diff --git a/tuf/testutils/swizzler.go b/tuf/testutils/swizzler.go index b66a793f7..39fd210a7 100644 --- a/tuf/testutils/swizzler.go +++ b/tuf/testutils/swizzler.go @@ -8,11 +8,11 @@ import ( "github.com/docker/go/canonical/json" "github.com/docker/notary/cryptoservice" "github.com/docker/notary/passphrase" + store "github.com/docker/notary/storage" "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" - "github.com/docker/notary/tuf/store" ) // ErrNoKeyForRole returns an error when the cryptoservice provided to @@ -89,7 +89,7 @@ func serializeMetadata(cs signed.CryptoService, s *data.Signed, role string, // gets a Signed from the metadata store func signedFromStore(cache store.MetadataStore, role string) (*data.Signed, error) { - b, err := cache.GetMeta(role, store.NoSizeLimit) + b, err := cache.GetSized(role, store.NoSizeLimit) if err != nil { return nil, err } @@ -122,23 +122,23 @@ func NewMetadataSwizzler(gun string, initialMetadata map[string][]byte, // SetInvalidJSON corrupts metadata into something that is no longer valid JSON func (m *MetadataSwizzler) SetInvalidJSON(role string) error { - metaBytes, err := m.MetadataCache.GetMeta(role, store.NoSizeLimit) + metaBytes, err := m.MetadataCache.GetSized(role, store.NoSizeLimit) if err != nil { return err } - return m.MetadataCache.SetMeta(role, metaBytes[5:]) + return m.MetadataCache.Set(role, metaBytes[5:]) } // AddExtraSpace adds an extra space to the beginning and end of the serialized // JSON bytes, which should not affect serialization, but will change the checksum // of the file. 
func (m *MetadataSwizzler) AddExtraSpace(role string) error { - metaBytes, err := m.MetadataCache.GetMeta(role, store.NoSizeLimit) + metaBytes, err := m.MetadataCache.GetSized(role, store.NoSizeLimit) if err != nil { return err } newBytes := append(append([]byte{' '}, metaBytes...), ' ') - return m.MetadataCache.SetMeta(role, newBytes) + return m.MetadataCache.Set(role, newBytes) } // SetInvalidSigned corrupts the metadata into something that is valid JSON, @@ -155,7 +155,7 @@ func (m *MetadataSwizzler) SetInvalidSigned(role string) error { if err != nil { return err } - return m.MetadataCache.SetMeta(role, metaBytes) + return m.MetadataCache.Set(role, metaBytes) } // SetInvalidSignedMeta corrupts the metadata into something that is unmarshallable @@ -190,7 +190,7 @@ func (m *MetadataSwizzler) SetInvalidSignedMeta(role string) error { if err != nil { return err } - return m.MetadataCache.SetMeta(role, metaBytes) + return m.MetadataCache.Set(role, metaBytes) } // TODO: corrupt metadata in such a way that it can be unmarshalled as a @@ -225,7 +225,7 @@ func (m *MetadataSwizzler) SetInvalidMetadataType(role string) error { if err != nil { return err } - return m.MetadataCache.SetMeta(role, metaBytes) + return m.MetadataCache.Set(role, metaBytes) } // InvalidateMetadataSignatures signs with the right key(s) but wrong hash @@ -248,7 +248,7 @@ func (m *MetadataSwizzler) InvalidateMetadataSignatures(role string) error { if err != nil { return err } - return m.MetadataCache.SetMeta(role, metaBytes) + return m.MetadataCache.Set(role, metaBytes) } // TODO: AddExtraSignedInfo - add an extra field to Signed that doesn't get @@ -257,7 +257,7 @@ func (m *MetadataSwizzler) InvalidateMetadataSignatures(role string) error { // RemoveMetadata deletes the metadata entirely func (m *MetadataSwizzler) RemoveMetadata(role string) error { - return m.MetadataCache.RemoveMeta(role) + return m.MetadataCache.Remove(role) } // SignMetadataWithInvalidKey signs the metadata with the wrong key 
@@ -278,7 +278,7 @@ func (m *MetadataSwizzler) SignMetadataWithInvalidKey(role string) error { if err != nil { return err } - return m.MetadataCache.SetMeta(role, metaBytes) + return m.MetadataCache.Set(role, metaBytes) } // OffsetMetadataVersion updates the metadata version @@ -313,7 +313,7 @@ func (m *MetadataSwizzler) OffsetMetadataVersion(role string, offset int) error if err != nil { return err } - return m.MetadataCache.SetMeta(role, metaBytes) + return m.MetadataCache.Set(role, metaBytes) } // ExpireMetadata expires the metadata, which would make it invalid - don't do anything if @@ -345,7 +345,7 @@ func (m *MetadataSwizzler) ExpireMetadata(role string) error { if err != nil { return err } - return m.MetadataCache.SetMeta(role, metaBytes) + return m.MetadataCache.Set(role, metaBytes) } // SetThreshold sets a threshold for a metadata role - can invalidate metadata for which @@ -357,7 +357,7 @@ func (m *MetadataSwizzler) SetThreshold(role string, newThreshold int) error { roleSpecifier = path.Dir(role) } - b, err := m.MetadataCache.GetMeta(roleSpecifier, store.NoSizeLimit) + b, err := m.MetadataCache.GetSized(roleSpecifier, store.NoSizeLimit) if err != nil { return err } @@ -401,7 +401,7 @@ func (m *MetadataSwizzler) SetThreshold(role string, newThreshold int) error { if err != nil { return err } - return m.MetadataCache.SetMeta(roleSpecifier, metaBytes) + return m.MetadataCache.Set(roleSpecifier, metaBytes) } // RotateKey rotates the key for a role - this can invalidate that role's metadata @@ -413,7 +413,7 @@ func (m *MetadataSwizzler) RotateKey(role string, key data.PublicKey) error { roleSpecifier = path.Dir(role) } - b, err := m.MetadataCache.GetMeta(roleSpecifier, store.NoSizeLimit) + b, err := m.MetadataCache.GetSized(roleSpecifier, store.NoSizeLimit) if err != nil { return err } @@ -460,7 +460,7 @@ func (m *MetadataSwizzler) RotateKey(role string, key data.PublicKey) error { if err != nil { return err } - return m.MetadataCache.SetMeta(roleSpecifier, 
metaBytes) + return m.MetadataCache.Set(roleSpecifier, metaBytes) } // ChangeRootKey swaps out the root key with a new key, and re-signs the metadata @@ -471,7 +471,7 @@ func (m *MetadataSwizzler) ChangeRootKey() error { return err } - b, err := m.MetadataCache.GetMeta(data.CanonicalRootRole, store.NoSizeLimit) + b, err := m.MetadataCache.GetSized(data.CanonicalRootRole, store.NoSizeLimit) if err != nil { return err } @@ -498,7 +498,7 @@ func (m *MetadataSwizzler) ChangeRootKey() error { if err != nil { return err } - return m.MetadataCache.SetMeta(data.CanonicalRootRole, metaBytes) + return m.MetadataCache.Set(data.CanonicalRootRole, metaBytes) } // UpdateSnapshotHashes updates the snapshot to reflect the latest hash changes, to @@ -509,7 +509,7 @@ func (m *MetadataSwizzler) UpdateSnapshotHashes(roles ...string) error { snapshotSigned *data.Signed err error ) - if metaBytes, err = m.MetadataCache.GetMeta(data.CanonicalSnapshotRole, store.NoSizeLimit); err != nil { + if metaBytes, err = m.MetadataCache.GetSized(data.CanonicalSnapshotRole, store.NoSizeLimit); err != nil { return err } @@ -525,7 +525,7 @@ func (m *MetadataSwizzler) UpdateSnapshotHashes(roles ...string) error { for _, role := range roles { if role != data.CanonicalSnapshotRole && role != data.CanonicalTimestampRole { - if metaBytes, err = m.MetadataCache.GetMeta(role, store.NoSizeLimit); err != nil { + if metaBytes, err = m.MetadataCache.GetSized(role, store.NoSizeLimit); err != nil { return err } @@ -549,7 +549,7 @@ func (m *MetadataSwizzler) UpdateSnapshotHashes(roles ...string) error { if err != nil { return err } - return m.MetadataCache.SetMeta(data.CanonicalSnapshotRole, metaBytes) + return m.MetadataCache.Set(data.CanonicalSnapshotRole, metaBytes) } // UpdateTimestampHash updates the timestamp to reflect the latest snapshot changes, to @@ -561,7 +561,7 @@ func (m *MetadataSwizzler) UpdateTimestampHash() error { timestampSigned *data.Signed err error ) - if metaBytes, err = 
m.MetadataCache.GetMeta(data.CanonicalTimestampRole, store.NoSizeLimit); err != nil { + if metaBytes, err = m.MetadataCache.GetSized(data.CanonicalTimestampRole, store.NoSizeLimit); err != nil { return err } // we can't just create a new timestamp, because then the expiry would be @@ -570,7 +570,7 @@ func (m *MetadataSwizzler) UpdateTimestampHash() error { return err } - if metaBytes, err = m.MetadataCache.GetMeta(data.CanonicalSnapshotRole, store.NoSizeLimit); err != nil { + if metaBytes, err = m.MetadataCache.GetSized(data.CanonicalSnapshotRole, store.NoSizeLimit); err != nil { return err } @@ -593,7 +593,7 @@ func (m *MetadataSwizzler) UpdateTimestampHash() error { if err != nil { return err } - return m.MetadataCache.SetMeta(data.CanonicalTimestampRole, metaBytes) + return m.MetadataCache.Set(data.CanonicalTimestampRole, metaBytes) } // MutateRoot takes a function that mutates the root metadata - once done, it @@ -632,7 +632,7 @@ func (m *MetadataSwizzler) MutateRoot(mutate func(*data.Root)) error { if err != nil { return err } - return m.MetadataCache.SetMeta(data.CanonicalRootRole, metaBytes) + return m.MetadataCache.Set(data.CanonicalRootRole, metaBytes) } // MutateTimestamp takes a function that mutates the timestamp metadata - once done, it @@ -665,7 +665,7 @@ func (m *MetadataSwizzler) MutateTimestamp(mutate func(*data.Timestamp)) error { if err != nil { return err } - return m.MetadataCache.SetMeta(data.CanonicalTimestampRole, metaBytes) + return m.MetadataCache.Set(data.CanonicalTimestampRole, metaBytes) } // MutateSnapshot takes a function that mutates the snapshot metadata - once done, it @@ -698,7 +698,7 @@ func (m *MetadataSwizzler) MutateSnapshot(mutate func(*data.Snapshot)) error { if err != nil { return err } - return m.MetadataCache.SetMeta(data.CanonicalSnapshotRole, metaBytes) + return m.MetadataCache.Set(data.CanonicalSnapshotRole, metaBytes) } // MutateTargets takes a function that mutates the targets metadata - once done, it @@ -731,5 
+731,5 @@ func (m *MetadataSwizzler) MutateTargets(mutate func(*data.Targets)) error { if err != nil { return err } - return m.MetadataCache.SetMeta(data.CanonicalTargetsRole, metaBytes) + return m.MetadataCache.Set(data.CanonicalTargetsRole, metaBytes) } diff --git a/tuf/testutils/swizzler_test.go b/tuf/testutils/swizzler_test.go index 87f12b961..61f7b5c96 100644 --- a/tuf/testutils/swizzler_test.go +++ b/tuf/testutils/swizzler_test.go @@ -11,9 +11,9 @@ import ( "testing" "time" + store "github.com/docker/notary/storage" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" - "github.com/docker/notary/tuf/store" "github.com/stretchr/testify/require" ) @@ -80,7 +80,7 @@ func TestSwizzlerSetInvalidJSON(t *testing.T) { f.SetInvalidJSON(data.CanonicalSnapshotRole) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalSnapshotRole { @@ -105,7 +105,7 @@ func TestSwizzlerAddExtraSpace(t *testing.T) { require.NoError(t, json.Unmarshal(origMeta[data.CanonicalSnapshotRole], snapshot)) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalTargetsRole { @@ -136,7 +136,7 @@ func TestSwizzlerSetInvalidSigned(t *testing.T) { f.SetInvalidSigned(data.CanonicalTargetsRole) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalTargetsRole { @@ -161,7 +161,7 @@ func TestSwizzlerSetInvalidSignedMeta(t *testing.T) { require.NoError(t, err) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + 
newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalRootRole { @@ -185,7 +185,7 @@ func TestSwizzlerSetInvalidMetadataType(t *testing.T) { f.SetInvalidMetadataType(data.CanonicalTargetsRole) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalTargetsRole { @@ -208,7 +208,7 @@ func TestSwizzlerInvalidateMetadataSignatures(t *testing.T) { f.InvalidateMetadataSignatures(data.CanonicalRootRole) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalRootRole { @@ -240,7 +240,7 @@ func TestSwizzlerRemoveMetadata(t *testing.T) { f.RemoveMetadata("targets/a") for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) if role != "targets/a" { require.NoError(t, err) require.True(t, bytes.Equal(metaBytes, newMeta), "bytes have changed for role %s", role) @@ -258,7 +258,7 @@ func TestSwizzlerSignMetadataWithInvalidKey(t *testing.T) { f.SignMetadataWithInvalidKey(data.CanonicalTimestampRole) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalTimestampRole { @@ -285,7 +285,7 @@ func TestSwizzlerOffsetMetadataVersion(t *testing.T) { f.OffsetMetadataVersion("targets/a", -2) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != "targets/a" { @@ 
-309,7 +309,7 @@ func TestSwizzlerExpireMetadata(t *testing.T) { require.NoError(t, err) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalRootRole { @@ -334,7 +334,7 @@ func TestSwizzlerSetThresholdBaseRole(t *testing.T) { require.NoError(t, err) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) // the threshold for base roles is set in root @@ -362,7 +362,7 @@ func TestSwizzlerSetThresholdDelegatedRole(t *testing.T) { f.SetThreshold("targets/a/b", 3) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) // the threshold for "targets/a/b" is in "targets/a" @@ -392,7 +392,7 @@ func TestSwizzlerChangeRootKey(t *testing.T) { for _, role := range roles { origMeta := origMeta[role] - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) // the threshold for base roles is set in root @@ -437,7 +437,7 @@ func TestSwizzlerUpdateSnapshotHashesSpecifiedRoles(t *testing.T) { // nothing has changed, signed data should be the same (signatures might // change because signatures may have random elements f.UpdateSnapshotHashes(data.CanonicalTargetsRole) - newMeta, err := f.MetadataCache.GetMeta(data.CanonicalSnapshotRole, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(data.CanonicalSnapshotRole, store.NoSizeLimit) origSigned, newSigned := &data.Signed{}, &data.Signed{} require.NoError(t, json.Unmarshal(origMeta[data.CanonicalSnapshotRole], origSigned)) @@ -451,7 +451,7 @@ func 
TestSwizzlerUpdateSnapshotHashesSpecifiedRoles(t *testing.T) { // update the snapshot with just 1 role f.UpdateSnapshotHashes(data.CanonicalTargetsRole) - newMeta, err = f.MetadataCache.GetMeta(data.CanonicalSnapshotRole, store.NoSizeLimit) + newMeta, err = f.MetadataCache.GetSized(data.CanonicalSnapshotRole, store.NoSizeLimit) require.NoError(t, err) require.False(t, bytes.Equal(origMeta[data.CanonicalSnapshotRole], newMeta)) @@ -481,7 +481,7 @@ func TestSwizzlerUpdateSnapshotHashesNoSpecifiedRoles(t *testing.T) { // nothing has changed, signed data should be the same (signatures might // change because signatures may have random elements f.UpdateSnapshotHashes() - newMeta, err := f.MetadataCache.GetMeta(data.CanonicalSnapshotRole, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(data.CanonicalSnapshotRole, store.NoSizeLimit) require.NoError(t, err) origSigned, newSigned := &data.Signed{}, &data.Signed{} @@ -496,7 +496,7 @@ func TestSwizzlerUpdateSnapshotHashesNoSpecifiedRoles(t *testing.T) { // update the snapshot with just no specified roles f.UpdateSnapshotHashes() - newMeta, err = f.MetadataCache.GetMeta(data.CanonicalSnapshotRole, store.NoSizeLimit) + newMeta, err = f.MetadataCache.GetSized(data.CanonicalSnapshotRole, store.NoSizeLimit) require.NoError(t, err) require.False(t, bytes.Equal(origMeta[data.CanonicalSnapshotRole], newMeta)) @@ -527,7 +527,7 @@ func TestSwizzlerUpdateTimestamp(t *testing.T) { // nothing has changed, signed data should be the same (signatures might // change because signatures may have random elements f.UpdateTimestampHash() - newMeta, err := f.MetadataCache.GetMeta(data.CanonicalTimestampRole, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(data.CanonicalTimestampRole, store.NoSizeLimit) require.NoError(t, err) origSigned, newSigned := &data.Signed{}, &data.Signed{} @@ -540,7 +540,7 @@ func TestSwizzlerUpdateTimestamp(t *testing.T) { // update the timestamp f.UpdateTimestampHash() - newMeta, err = 
f.MetadataCache.GetMeta(data.CanonicalTimestampRole, store.NoSizeLimit) + newMeta, err = f.MetadataCache.GetSized(data.CanonicalTimestampRole, store.NoSizeLimit) require.NoError(t, err) require.False(t, bytes.Equal(origMeta[data.CanonicalTimestampRole], newMeta)) @@ -584,7 +584,7 @@ func TestSwizzlerMutateRoot(t *testing.T) { require.NoError(t, f.MutateRoot(func(r *data.Root) { r.Roles["hello"] = nil })) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalRootRole { @@ -610,7 +610,7 @@ func TestSwizzlerMutateTimestamp(t *testing.T) { require.NoError(t, f.MutateTimestamp(func(t *data.Timestamp) { t.Meta["hello"] = data.FileMeta{} })) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalTimestampRole { @@ -633,7 +633,7 @@ func TestSwizzlerMutateSnapshot(t *testing.T) { require.NoError(t, f.MutateSnapshot(func(s *data.Snapshot) { s.Meta["hello"] = data.FileMeta{} })) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalSnapshotRole { @@ -656,7 +656,7 @@ func TestSwizzlerMutateTargets(t *testing.T) { require.NoError(t, f.MutateTargets(func(t *data.Targets) { t.Targets["hello"] = data.FileMeta{} })) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalTargetsRole { @@ -684,7 +684,7 @@ func TestSwizzlerRotateKeyBaseRole(t *testing.T) { require.NoError(t, f.RotateKey(theRole, pubKey)) for role, 
metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != data.CanonicalRootRole { @@ -716,7 +716,7 @@ func TestSwizzlerRotateKeyDelegationRole(t *testing.T) { require.NoError(t, f.RotateKey(theRole, pubKey)) for role, metaBytes := range origMeta { - newMeta, err := f.MetadataCache.GetMeta(role, store.NoSizeLimit) + newMeta, err := f.MetadataCache.GetSized(role, store.NoSizeLimit) require.NoError(t, err) if role != "targets/a" { diff --git a/tuf/tuf_test.go b/tuf/tuf_test.go index 6753895d4..69a3363be 100644 --- a/tuf/tuf_test.go +++ b/tuf/tuf_test.go @@ -15,6 +15,7 @@ import ( "github.com/docker/notary/trustmanager" "github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/signed" + "github.com/docker/notary/tuf/utils" "github.com/stretchr/testify/require" ) @@ -1175,7 +1176,7 @@ func TestSignRootOldKeyCertExists(t *testing.T) { oldRootCert, err := cryptoservice.GenerateCertificate(rootPrivateKey, gun, referenceTime.AddDate(-9, 0, 0), referenceTime.AddDate(1, 0, 0)) require.NoError(t, err) - oldRootCertKey := trustmanager.CertToKey(oldRootCert) + oldRootCertKey := utils.CertToKey(oldRootCert) repo := initRepoWithRoot(t, cs, oldRootCertKey) @@ -1189,7 +1190,7 @@ func TestSignRootOldKeyCertExists(t *testing.T) { // Create a new certificate newRootCert, err := cryptoservice.GenerateCertificate(rootPrivateKey, gun, referenceTime, referenceTime.AddDate(10, 0, 0)) require.NoError(t, err) - newRootCertKey := trustmanager.CertToKey(newRootCert) + newRootCertKey := utils.CertToKey(newRootCert) require.NotEqual(t, oldRootCertKey.ID(), newRootCertKey.ID()) // Only trust the new certificate @@ -1228,7 +1229,7 @@ func TestSignRootOldKeyCertMissing(t *testing.T) { oldRootCert, err := cryptoservice.GenerateCertificate(rootPrivateKey, gun, referenceTime.AddDate(-9, 0, 0), referenceTime.AddDate(1, 0, 0)) 
require.NoError(t, err) - oldRootCertKey := trustmanager.CertToKey(oldRootCert) + oldRootCertKey := utils.CertToKey(oldRootCert) repo := initRepoWithRoot(t, cs, oldRootCertKey) @@ -1242,7 +1243,7 @@ func TestSignRootOldKeyCertMissing(t *testing.T) { // Create a new certificate newRootCert, err := cryptoservice.GenerateCertificate(rootPrivateKey, gun, referenceTime, referenceTime.AddDate(10, 0, 0)) require.NoError(t, err) - newRootCertKey := trustmanager.CertToKey(newRootCert) + newRootCertKey := utils.CertToKey(newRootCert) require.NotEqual(t, oldRootCertKey.ID(), newRootCertKey.ID()) // Only trust the new certificate @@ -1293,7 +1294,7 @@ func TestSignRootOldRootRolesAndOldSigs(t *testing.T) { rootCert, err := cryptoservice.GenerateCertificate(rootPrivateKey, gun, referenceTime.AddDate(-9, 0, 0), referenceTime.AddDate(1, 0, 0)) require.NoError(t, err) - rootCertKeys[i] = trustmanager.CertToKey(rootCert) + rootCertKeys[i] = utils.CertToKey(rootCert) rootPrivKeys[i] = rootPrivateKey } diff --git a/tuf/utils/util.go b/tuf/utils/util.go deleted file mode 100644 index a3836f680..000000000 --- a/tuf/utils/util.go +++ /dev/null @@ -1,109 +0,0 @@ -package utils - -import ( - "crypto/hmac" - "encoding/hex" - "errors" - "fmt" - gopath "path" - "path/filepath" - - "github.com/docker/notary/trustmanager" - "github.com/docker/notary/tuf/data" -) - -// ErrWrongLength indicates the length was different to that expected -var ErrWrongLength = errors.New("wrong length") - -// ErrWrongHash indicates the hash was different to that expected -type ErrWrongHash struct { - Type string - Expected []byte - Actual []byte -} - -// Error implements error interface -func (e ErrWrongHash) Error() string { - return fmt.Sprintf("wrong %s hash, expected %#x got %#x", e.Type, e.Expected, e.Actual) -} - -// ErrNoCommonHash indicates the metadata did not provide any hashes this -// client recognizes -type ErrNoCommonHash struct { - Expected data.Hashes - Actual data.Hashes -} - -// Error 
implements error interface -func (e ErrNoCommonHash) Error() string { - types := func(a data.Hashes) []string { - t := make([]string, 0, len(a)) - for typ := range a { - t = append(t, typ) - } - return t - } - return fmt.Sprintf("no common hash function, expected one of %s, got %s", types(e.Expected), types(e.Actual)) -} - -// ErrUnknownHashAlgorithm - client was ashed to use a hash algorithm -// it is not familiar with -type ErrUnknownHashAlgorithm struct { - Name string -} - -// Error implements error interface -func (e ErrUnknownHashAlgorithm) Error() string { - return fmt.Sprintf("unknown hash algorithm: %s", e.Name) -} - -// PassphraseFunc type for func that request a passphrase -type PassphraseFunc func(role string, confirm bool) ([]byte, error) - -// FileMetaEqual checks whether 2 FileMeta objects are consistent with eachother -func FileMetaEqual(actual data.FileMeta, expected data.FileMeta) error { - if actual.Length != expected.Length { - return ErrWrongLength - } - hashChecked := false - for typ, hash := range expected.Hashes { - if h, ok := actual.Hashes[typ]; ok { - hashChecked = true - if !hmac.Equal(h, hash) { - return ErrWrongHash{typ, hash, h} - } - } - } - if !hashChecked { - return ErrNoCommonHash{expected.Hashes, actual.Hashes} - } - return nil -} - -// NormalizeTarget adds a slash, if required, to the front of a target path -func NormalizeTarget(path string) string { - return gopath.Join("/", path) -} - -// HashedPaths prefixes the filename with the known hashes for the file, -// returning a list of possible consistent paths. -func HashedPaths(path string, hashes data.Hashes) []string { - paths := make([]string, 0, len(hashes)) - for _, hash := range hashes { - hashedPath := filepath.Join(filepath.Dir(path), hex.EncodeToString(hash)+"."+filepath.Base(path)) - paths = append(paths, hashedPath) - } - return paths -} - -// CanonicalKeyID returns the ID of the public bytes version of a TUF key. 
-// On regular RSA/ECDSA TUF keys, this is just the key ID. On X509 RSA/ECDSA -// TUF keys, this is the key ID of the public key part of the key in the leaf cert -func CanonicalKeyID(k data.PublicKey) (string, error) { - switch k.Algorithm() { - case data.ECDSAx509Key, data.RSAx509Key: - return trustmanager.X509PublicKeyID(k) - default: - return k.ID(), nil - } -} diff --git a/tuf/utils/util_test.go b/tuf/utils/util_test.go deleted file mode 100644 index 4c043a135..000000000 --- a/tuf/utils/util_test.go +++ /dev/null @@ -1,91 +0,0 @@ -package utils - -import ( - "encoding/hex" - "testing" - - "github.com/docker/notary/tuf/data" - "github.com/stretchr/testify/require" -) - -func TestFileMetaEqual(t *testing.T) { - type test struct { - name string - b data.FileMeta - a data.FileMeta - err func(test) error - } - fileMeta := func(length int64, hashes map[string]string) data.FileMeta { - m := data.FileMeta{Length: length, Hashes: make(map[string][]byte, len(hashes))} - for typ, hash := range hashes { - v, err := hex.DecodeString(hash) - require.NoError(t, err, "hash not in hex") - m.Hashes[typ] = v - } - return m - } - tests := []test{ - { - name: "wrong length", - a: data.FileMeta{Length: 1}, - b: data.FileMeta{Length: 2}, - err: func(test) error { return ErrWrongLength }, - }, - { - name: "wrong sha512 hash", - a: fileMeta(10, map[string]string{"sha512": "111111"}), - b: fileMeta(10, map[string]string{"sha512": "222222"}), - err: func(t test) error { return ErrWrongHash{"sha512", t.b.Hashes["sha512"], t.a.Hashes["sha512"]} }, - }, - { - name: "intersecting hashes", - a: fileMeta(10, map[string]string{"sha512": "111111", "md5": "222222"}), - b: fileMeta(10, map[string]string{"sha512": "111111", "sha256": "333333"}), - err: func(test) error { return nil }, - }, - { - name: "no common hashes", - a: fileMeta(10, map[string]string{"sha512": "111111"}), - b: fileMeta(10, map[string]string{"sha256": "222222", "md5": "333333"}), - err: func(t test) error { return 
ErrNoCommonHash{t.b.Hashes, t.a.Hashes} }, - }, - } - for _, run := range tests { - require.Equal(t, FileMetaEqual(run.a, run.b), run.err(run), "Files not equivalent") - } -} - -func TestNormalizeTarget(t *testing.T) { - for before, after := range map[string]string{ - "": "/", - "foo.txt": "/foo.txt", - "/bar.txt": "/bar.txt", - "foo//bar.txt": "/foo/bar.txt", - "/with/./a/dot": "/with/a/dot", - "/with/double/../dot": "/with/dot", - } { - require.Equal(t, NormalizeTarget(before), after, "Path normalization did not output expected.") - } -} - -func TestHashedPaths(t *testing.T) { - hexBytes := func(s string) []byte { - v, err := hex.DecodeString(s) - require.NoError(t, err, "String was not hex") - return v - } - hashes := data.Hashes{ - "sha512": hexBytes("abc123"), - "sha256": hexBytes("def456"), - } - paths := HashedPaths("foo/bar.txt", hashes) - // cannot use DeepEquals as the returned order is non-deterministic - require.Len(t, paths, 2, "Expected 2 paths") - expected := map[string]struct{}{"foo/abc123.bar.txt": {}, "foo/def456.bar.txt": {}} - for _, path := range paths { - if _, ok := expected[path]; !ok { - t.Fatalf("unexpected path: %s", path) - } - delete(expected, path) - } -} diff --git a/tuf/utils/utils.go b/tuf/utils/utils.go index 8de72b679..6cf499c47 100644 --- a/tuf/utils/utils.go +++ b/tuf/utils/utils.go @@ -5,6 +5,7 @@ import ( "crypto/sha512" "crypto/tls" "encoding/hex" + "errors" "fmt" "io" "net/http" @@ -150,3 +151,37 @@ func ConsistentName(role string, hashSha256 []byte) string { } return role } + +// ErrWrongLength indicates the length was different to that expected +var ErrWrongLength = errors.New("wrong length") + +// ErrWrongHash indicates the hash was different to that expected +type ErrWrongHash struct { + Type string + Expected []byte + Actual []byte +} + +// Error implements error interface +func (e ErrWrongHash) Error() string { + return fmt.Sprintf("wrong %s hash, expected %#x got %#x", e.Type, e.Expected, e.Actual) +} + +// 
ErrNoCommonHash indicates the metadata did not provide any hashes this +// client recognizes +type ErrNoCommonHash struct { + Expected data.Hashes + Actual data.Hashes +} + +// Error implements error interface +func (e ErrNoCommonHash) Error() string { + types := func(a data.Hashes) []string { + t := make([]string, 0, len(a)) + for typ := range a { + t = append(t, typ) + } + return t + } + return fmt.Sprintf("no common hash function, expected one of %s, got %s", types(e.Expected), types(e.Actual)) +} diff --git a/trustmanager/x509utils.go b/tuf/utils/x509.go similarity index 97% rename from trustmanager/x509utils.go rename to tuf/utils/x509.go index 4289a2890..b5faeb15a 100644 --- a/trustmanager/x509utils.go +++ b/tuf/utils/x509.go @@ -1,4 +1,4 @@ -package trustmanager +package utils import ( "bytes" @@ -22,22 +22,16 @@ import ( "github.com/docker/notary/tuf/data" ) -// CertToPEM is a utility function returns a PEM encoded x509 Certificate -func CertToPEM(cert *x509.Certificate) []byte { - pemCert := pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: cert.Raw}) - - return pemCert -} - -// CertChainToPEM is a utility function returns a PEM encoded chain of x509 Certificates, in the order they are passed -func CertChainToPEM(certChain []*x509.Certificate) ([]byte, error) { - var pemBytes bytes.Buffer - for _, cert := range certChain { - if err := pem.Encode(&pemBytes, &pem.Block{Type: "CERTIFICATE", Bytes: cert.Raw}); err != nil { - return nil, err - } +// CanonicalKeyID returns the ID of the public bytes version of a TUF key. +// On regular RSA/ECDSA TUF keys, this is just the key ID. 
On X509 RSA/ECDSA +// TUF keys, this is the key ID of the public key part of the key in the leaf cert +func CanonicalKeyID(k data.PublicKey) (string, error) { + switch k.Algorithm() { + case data.ECDSAx509Key, data.RSAx509Key: + return X509PublicKeyID(k) + default: + return k.ID(), nil } - return pemBytes.Bytes(), nil } // LoadCertFromPEM returns the first certificate found in a bunch of bytes or error @@ -64,6 +58,108 @@ func LoadCertFromPEM(pemBytes []byte) (*x509.Certificate, error) { return nil, errors.New("no certificates found in PEM data") } +// X509PublicKeyID returns a public key ID as a string, given a +// data.PublicKey that contains an X509 Certificate +func X509PublicKeyID(certPubKey data.PublicKey) (string, error) { + // Note that this only loads the first certificate from the public key + cert, err := LoadCertFromPEM(certPubKey.Public()) + if err != nil { + return "", err + } + pubKeyBytes, err := x509.MarshalPKIXPublicKey(cert.PublicKey) + if err != nil { + return "", err + } + + var key data.PublicKey + switch certPubKey.Algorithm() { + case data.ECDSAx509Key: + key = data.NewECDSAPublicKey(pubKeyBytes) + case data.RSAx509Key: + key = data.NewRSAPublicKey(pubKeyBytes) + } + + return key.ID(), nil +} + +// ParsePEMPrivateKey returns a data.PrivateKey from a PEM encoded private key. It +// only supports RSA (PKCS#1) and attempts to decrypt using the passphrase, if encrypted. 
+func ParsePEMPrivateKey(pemBytes []byte, passphrase string) (data.PrivateKey, error) { + block, _ := pem.Decode(pemBytes) + if block == nil { + return nil, errors.New("no valid private key found") + } + + var privKeyBytes []byte + var err error + if x509.IsEncryptedPEMBlock(block) { + privKeyBytes, err = x509.DecryptPEMBlock(block, []byte(passphrase)) + if err != nil { + return nil, errors.New("could not decrypt private key") + } + } else { + privKeyBytes = block.Bytes + } + + switch block.Type { + case "RSA PRIVATE KEY": + rsaPrivKey, err := x509.ParsePKCS1PrivateKey(privKeyBytes) + if err != nil { + return nil, fmt.Errorf("could not parse DER encoded key: %v", err) + } + + tufRSAPrivateKey, err := RSAToPrivateKey(rsaPrivKey) + if err != nil { + return nil, fmt.Errorf("could not convert rsa.PrivateKey to data.PrivateKey: %v", err) + } + + return tufRSAPrivateKey, nil + case "EC PRIVATE KEY": + ecdsaPrivKey, err := x509.ParseECPrivateKey(privKeyBytes) + if err != nil { + return nil, fmt.Errorf("could not parse DER encoded private key: %v", err) + } + + tufECDSAPrivateKey, err := ECDSAToPrivateKey(ecdsaPrivKey) + if err != nil { + return nil, fmt.Errorf("could not convert ecdsa.PrivateKey to data.PrivateKey: %v", err) + } + + return tufECDSAPrivateKey, nil + case "ED25519 PRIVATE KEY": + // We serialize ED25519 keys by concatenating the private key + // to the public key and encoding with PEM. See the + // ED25519ToPrivateKey function. 
+ tufECDSAPrivateKey, err := ED25519ToPrivateKey(privKeyBytes) + if err != nil { + return nil, fmt.Errorf("could not convert ecdsa.PrivateKey to data.PrivateKey: %v", err) + } + + return tufECDSAPrivateKey, nil + + default: + return nil, fmt.Errorf("unsupported key type %q", block.Type) + } +} + +// CertToPEM is a utility function returns a PEM encoded x509 Certificate +func CertToPEM(cert *x509.Certificate) []byte { + pemCert := pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: cert.Raw}) + + return pemCert +} + +// CertChainToPEM is a utility function returns a PEM encoded chain of x509 Certificates, in the order they are passed +func CertChainToPEM(certChain []*x509.Certificate) ([]byte, error) { + var pemBytes bytes.Buffer + for _, cert := range certChain { + if err := pem.Encode(&pemBytes, &pem.Block{Type: "CERTIFICATE", Bytes: cert.Raw}); err != nil { + return nil, err + } + } + return pemBytes.Bytes(), nil +} + // LoadCertFromFile loads the first certificate from the file provided. The // data is expected to be PEM Encoded and contain one of more certificates // with PEM type "CERTIFICATE" @@ -138,66 +234,6 @@ func GetIntermediateCerts(certs []*x509.Certificate) []*x509.Certificate { return intCerts } -// ParsePEMPrivateKey returns a data.PrivateKey from a PEM encoded private key. It -// only supports RSA (PKCS#1) and attempts to decrypt using the passphrase, if encrypted. 
-func ParsePEMPrivateKey(pemBytes []byte, passphrase string) (data.PrivateKey, error) { - block, _ := pem.Decode(pemBytes) - if block == nil { - return nil, errors.New("no valid private key found") - } - - var privKeyBytes []byte - var err error - if x509.IsEncryptedPEMBlock(block) { - privKeyBytes, err = x509.DecryptPEMBlock(block, []byte(passphrase)) - if err != nil { - return nil, errors.New("could not decrypt private key") - } - } else { - privKeyBytes = block.Bytes - } - - switch block.Type { - case "RSA PRIVATE KEY": - rsaPrivKey, err := x509.ParsePKCS1PrivateKey(privKeyBytes) - if err != nil { - return nil, fmt.Errorf("could not parse DER encoded key: %v", err) - } - - tufRSAPrivateKey, err := RSAToPrivateKey(rsaPrivKey) - if err != nil { - return nil, fmt.Errorf("could not convert rsa.PrivateKey to data.PrivateKey: %v", err) - } - - return tufRSAPrivateKey, nil - case "EC PRIVATE KEY": - ecdsaPrivKey, err := x509.ParseECPrivateKey(privKeyBytes) - if err != nil { - return nil, fmt.Errorf("could not parse DER encoded private key: %v", err) - } - - tufECDSAPrivateKey, err := ECDSAToPrivateKey(ecdsaPrivKey) - if err != nil { - return nil, fmt.Errorf("could not convert ecdsa.PrivateKey to data.PrivateKey: %v", err) - } - - return tufECDSAPrivateKey, nil - case "ED25519 PRIVATE KEY": - // We serialize ED25519 keys by concatenating the private key - // to the public key and encoding with PEM. See the - // ED25519ToPrivateKey function. - tufECDSAPrivateKey, err := ED25519ToPrivateKey(privKeyBytes) - if err != nil { - return nil, fmt.Errorf("could not convert ecdsa.PrivateKey to data.PrivateKey: %v", err) - } - - return tufECDSAPrivateKey, nil - - default: - return nil, fmt.Errorf("unsupported key type %q", block.Type) - } -} - // ParsePEMPublicKey returns a data.PublicKey from a PEM encoded public key or certificate. 
func ParsePEMPublicKey(pubKeyBytes []byte) (data.PublicKey, error) { pemBlock, _ := pem.Decode(pubKeyBytes) @@ -498,27 +534,3 @@ func NewCertificate(gun string, startTime, endTime time.Time) (*x509.Certificate BasicConstraintsValid: true, }, nil } - -// X509PublicKeyID returns a public key ID as a string, given a -// data.PublicKey that contains an X509 Certificate -func X509PublicKeyID(certPubKey data.PublicKey) (string, error) { - // Note that this only loads the first certificate from the public key - cert, err := LoadCertFromPEM(certPubKey.Public()) - if err != nil { - return "", err - } - pubKeyBytes, err := x509.MarshalPKIXPublicKey(cert.PublicKey) - if err != nil { - return "", err - } - - var key data.PublicKey - switch certPubKey.Algorithm() { - case data.ECDSAx509Key: - key = data.NewECDSAPublicKey(pubKeyBytes) - case data.RSAx509Key: - key = data.NewRSAPublicKey(pubKeyBytes) - } - - return key.ID(), nil -} diff --git a/trustmanager/x509utils_test.go b/tuf/utils/x509_test.go similarity index 94% rename from trustmanager/x509utils_test.go rename to tuf/utils/x509_test.go index 11710cb38..4b98f05c3 100644 --- a/trustmanager/x509utils_test.go +++ b/tuf/utils/x509_test.go @@ -1,4 +1,4 @@ -package trustmanager +package utils import ( "crypto/ecdsa" @@ -16,15 +16,15 @@ import ( func TestCertsToKeys(t *testing.T) { // Get root certificate - rootCA, err := LoadCertFromFile("../fixtures/root-ca.crt") + rootCA, err := LoadCertFromFile("../../fixtures/root-ca.crt") require.NoError(t, err) // Get intermediate certificate - intermediateCA, err := LoadCertFromFile("../fixtures/intermediate-ca.crt") + intermediateCA, err := LoadCertFromFile("../../fixtures/intermediate-ca.crt") require.NoError(t, err) // Get leaf certificate - leafCert, err := LoadCertFromFile("../fixtures/secure.example.com.crt") + leafCert, err := LoadCertFromFile("../../fixtures/secure.example.com.crt") require.NoError(t, err) // Get our certList with Leaf Cert and Intermediate @@ -170,14 +170,14 @@ 
func TestKeyOperations(t *testing.T) { // X509PublickeyID returns the public key ID of a RSA X509 key rather than the // cert ID func TestRSAX509PublickeyID(t *testing.T) { - fileBytes, err := ioutil.ReadFile("../fixtures/notary-server.key") + fileBytes, err := ioutil.ReadFile("../../fixtures/notary-server.key") require.NoError(t, err) privKey, err := ParsePEMPrivateKey(fileBytes, "") require.NoError(t, err) expectedTUFID := privKey.ID() - cert, err := LoadCertFromFile("../fixtures/notary-server.crt") + cert, err := LoadCertFromFile("../../fixtures/notary-server.crt") require.NoError(t, err) rsaKeyBytes, err := x509.MarshalPKIXPublicKey(cert.PublicKey) diff --git a/utils/configuration_test.go b/utils/configuration_test.go index 5077bc0a5..743031b45 100644 --- a/utils/configuration_test.go +++ b/utils/configuration_test.go @@ -12,7 +12,7 @@ import ( "github.com/Sirupsen/logrus" "github.com/bugsnag/bugsnag-go" "github.com/docker/notary" - "github.com/docker/notary/trustmanager" + "github.com/docker/notary/tuf/utils" "github.com/spf13/viper" "github.com/stretchr/testify/require" ) @@ -390,7 +390,7 @@ func TestParseTLSWithTLS(t *testing.T) { expectedCert, err := tls.LoadX509KeyPair(Cert, Key) require.NoError(t, err) - expectedRoot, err := trustmanager.LoadCertFromFile(Root) + expectedRoot, err := utils.LoadCertFromFile(Root) require.NoError(t, err) require.Len(t, tlsConfig.Certificates, 1) @@ -449,7 +449,7 @@ func TestParseTLSWithEnvironmentVariables(t *testing.T) { expectedCert, err := tls.LoadX509KeyPair(Cert, Key) require.NoError(t, err) - expectedRoot, err := trustmanager.LoadCertFromFile(Root) + expectedRoot, err := utils.LoadCertFromFile(Root) require.NoError(t, err) require.Len(t, tlsConfig.Certificates, 1) From dff70446b51d88a83788a5d55a57ecfe28a53454 Mon Sep 17 00:00:00 2001 From: David Lawrence Date: Mon, 11 Jul 2016 10:36:30 -0700 Subject: [PATCH 3/3] re-implement import/export Signed-off-by: David Lawrence (github: endophage) --- 
client/client_test.go | 16 +- cmd/notary/keys.go | 124 +++++++++++- cmd/notary/keys_nonpkcs11.go | 10 + cmd/notary/keys_nonpkcs11_test.go | 151 ++++++++++++++ cmd/notary/keys_pkcs11.go | 28 ++- cmd/notary/keys_pkcs11_test.go | 107 ++++++++++ cmd/notary/keys_test.go | 246 ++++++++++++++++++++++- cmd/notary/main_test.go | 1 + const.go | 2 + storage/filestore.go | 7 + storage/httpstore_test.go | 2 +- trustmanager/interfaces.go | 4 - trustmanager/keystore.go | 3 +- trustmanager/yubikey/import.go | 57 ++++++ tuf/utils/utils.go | 35 ---- utils/keys.go | 143 ++++++++++++++ utils/keys_test.go | 317 ++++++++++++++++++++++++++++++ 17 files changed, 1194 insertions(+), 59 deletions(-) create mode 100644 cmd/notary/keys_nonpkcs11_test.go create mode 100644 cmd/notary/keys_pkcs11_test.go create mode 100644 trustmanager/yubikey/import.go create mode 100644 utils/keys.go create mode 100644 utils/keys_test.go diff --git a/client/client_test.go b/client/client_test.go index 0734c95b5..f45a910f8 100644 --- a/client/client_test.go +++ b/client/client_test.go @@ -3341,19 +3341,19 @@ func TestDeleteRemoteRepo(t *testing.T) { // Try connecting to the remote store directly and make sure that no metadata exists for this gun remoteStore, err := getRemoteStore(repo.baseURL, repo.gun, repo.roundTrip) require.NoError(t, err) - meta, err := remoteStore.GetMeta(data.CanonicalRootRole, store.NoSizeLimit) + meta, err := remoteStore.GetSized(data.CanonicalRootRole, store.NoSizeLimit) require.Error(t, err) require.IsType(t, store.ErrMetaNotFound{}, err) require.Nil(t, meta) - meta, err = remoteStore.GetMeta(data.CanonicalTargetsRole, store.NoSizeLimit) + meta, err = remoteStore.GetSized(data.CanonicalTargetsRole, store.NoSizeLimit) require.Error(t, err) require.IsType(t, store.ErrMetaNotFound{}, err) require.Nil(t, meta) - meta, err = remoteStore.GetMeta(data.CanonicalSnapshotRole, store.NoSizeLimit) + meta, err = remoteStore.GetSized(data.CanonicalSnapshotRole, store.NoSizeLimit) require.Error(t, 
err) require.IsType(t, store.ErrMetaNotFound{}, err) require.Nil(t, meta) - meta, err = remoteStore.GetMeta(data.CanonicalTimestampRole, store.NoSizeLimit) + meta, err = remoteStore.GetSized(data.CanonicalTimestampRole, store.NoSizeLimit) require.Error(t, err) require.IsType(t, store.ErrMetaNotFound{}, err) require.Nil(t, meta) @@ -3364,16 +3364,16 @@ func TestDeleteRemoteRepo(t *testing.T) { requireRepoHasExpectedMetadata(t, longLivingRepo, data.CanonicalSnapshotRole, true) remoteStore, err = getRemoteStore(longLivingRepo.baseURL, longLivingRepo.gun, longLivingRepo.roundTrip) require.NoError(t, err) - meta, err = remoteStore.GetMeta(data.CanonicalRootRole, store.NoSizeLimit) + meta, err = remoteStore.GetSized(data.CanonicalRootRole, store.NoSizeLimit) require.NoError(t, err) require.NotNil(t, meta) - meta, err = remoteStore.GetMeta(data.CanonicalTargetsRole, store.NoSizeLimit) + meta, err = remoteStore.GetSized(data.CanonicalTargetsRole, store.NoSizeLimit) require.NoError(t, err) require.NotNil(t, meta) - meta, err = remoteStore.GetMeta(data.CanonicalSnapshotRole, store.NoSizeLimit) + meta, err = remoteStore.GetSized(data.CanonicalSnapshotRole, store.NoSizeLimit) require.NoError(t, err) require.NotNil(t, meta) - meta, err = remoteStore.GetMeta(data.CanonicalTimestampRole, store.NoSizeLimit) + meta, err = remoteStore.GetSized(data.CanonicalTimestampRole, store.NoSizeLimit) require.NoError(t, err) require.NotNil(t, meta) diff --git a/cmd/notary/keys.go b/cmd/notary/keys.go index 6d17a26c6..a80e69b7e 100644 --- a/cmd/notary/keys.go +++ b/cmd/notary/keys.go @@ -9,12 +9,15 @@ import ( notaryclient "github.com/docker/notary/client" "github.com/docker/notary/cryptoservice" + store "github.com/docker/notary/storage" "github.com/docker/notary/trustmanager" + "github.com/docker/notary/utils" "github.com/docker/notary" "github.com/docker/notary/tuf/data" "github.com/spf13/cobra" "github.com/spf13/viper" + "os" ) var cmdKeyTemplate = usageTemplate{ 
@@ -53,6 +56,18 @@ var cmdKeyPasswdTemplate = usageTemplate{ Long: "Changes the passphrase for the key with the given keyID. Will require validation of the old passphrase.", } +var cmdKeyImportTemplate = usageTemplate{ + Use: "import pemfile [ pemfile ... ]", + Short: "Imports all keys from all provided .pem files", + Long: "Imports all keys from all provided .pem files by reading each PEM block from the file and writing that block to a unique object in the local keystore. A Yubikey will be the preferred import location for root keys if present.", +} + +var cmdKeyExportTemplate = usageTemplate{ + Use: "export", + Short: "Exports all keys from all local keystores. Can be filtered using the --key and --gun flags.", + Long: "Exports all keys from all local keystores. Which keys are exported can be restricted by using the --key or --gun flags. By default the result is sent to stdout; it can be directed to a file with the -o flag. Keys stored in a Yubikey cannot be exported.", +} + type keyCommander struct { // these need to be set configGetter func() (*viper.Viper, error) @@ -63,6 +78,10 @@ type keyCommander struct { rotateKeyServerManaged bool input io.Reader + + exportGUNs []string + exportKeyIDs []string + outFile string } func (k *keyCommander) GetCommand() *cobra.Command { @@ -78,6 +97,28 @@ func (k *keyCommander) GetCommand() *cobra.Command { "Required for timestamp role, optional for snapshot role") cmd.AddCommand(cmdRotateKey) + cmd.AddCommand(cmdKeyImportTemplate.ToCommand(k.importKeys)) + cmdExport := cmdKeyExportTemplate.ToCommand(k.exportKeys) + cmdExport.Flags().StringSliceVar( + &k.exportGUNs, + "gun", + nil, + "GUNs for which to export keys", + ) + cmdExport.Flags().StringSliceVar( + &k.exportKeyIDs, + "key", + nil, + "Key IDs to export", + ) + cmdExport.Flags().StringVarP( + &k.outFile, + "output", + "o", + "", + "Filepath to write export output to", + ) + cmd.AddCommand(cmdExport) return cmd } @@ -345,14 +386,91 @@ func (k *keyCommander) 
keyPassphraseChange(cmd *cobra.Command, args []string) er if err != nil { return err } - cmd.Println("") - cmd.Printf("Successfully updated passphrase for key ID: %s", keyID) - cmd.Println("") + cmd.Printf("\nSuccessfully updated passphrase for key ID: %s\n", keyID) + return nil +} + +func (k *keyCommander) importKeys(cmd *cobra.Command, args []string) error { + if len(args) < 1 { + cmd.Usage() + return fmt.Errorf("must specify at least one input file to import keys from") + } + config, err := k.configGetter() + if err != nil { + return err + } + + directory := config.GetString("trust_dir") + importers, err := getImporters(directory, k.getRetriever()) + if err != nil { + return err + } + for _, file := range args { + from, err := os.OpenFile(file, os.O_RDONLY, notary.PrivKeyPerms) + defer from.Close() + + if err = utils.ImportKeys(from, importers); err != nil { + return err + } + } + return nil +} + +func (k *keyCommander) exportKeys(cmd *cobra.Command, args []string) error { + var ( + out io.Writer + err error + ) + if len(args) > 0 { + cmd.Usage() + return fmt.Errorf("export does not take any positional arguments") + } + config, err := k.configGetter() + if err != nil { + return err + } + + if k.outFile == "" { + out = cmd.Out() + } else { + f, err := os.OpenFile(k.outFile, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, notary.PrivKeyPerms) + if err != nil { + return err + } + defer f.Close() + out = f + } + + directory := config.GetString("trust_dir") + fileStore, err := store.NewPrivateKeyFileStorage(directory, notary.KeyExtension) + if err != nil { + return err + } + if len(k.exportGUNs) > 0 { + if len(k.exportKeyIDs) > 0 { + return fmt.Errorf("Only the --gun or --key flag may be provided, not a mix of the two flags") + } + for _, gun := range k.exportGUNs { + gunPath := filepath.Join(notary.NonRootKeysSubdir, gun) + return utils.ExportKeysByGUN(out, fileStore, gunPath) + } + } else if len(k.exportKeyIDs) > 0 { + return utils.ExportKeysByID(out, fileStore, 
k.exportKeyIDs) + } + // export everything + keys := fileStore.ListFiles() + for _, k := range keys { + err := utils.ExportKeys(out, fileStore, k) + if err != nil { + return err + } + } return nil } func (k *keyCommander) getKeyStores( config *viper.Viper, withHardware, hardwareBackup bool) ([]trustmanager.KeyStore, error) { + retriever := k.getRetriever() directory := config.GetString("trust_dir") diff --git a/cmd/notary/keys_nonpkcs11.go b/cmd/notary/keys_nonpkcs11.go index a6bde6fac..eaa1e6411 100644 --- a/cmd/notary/keys_nonpkcs11.go +++ b/cmd/notary/keys_nonpkcs11.go @@ -6,9 +6,19 @@ import ( "errors" "github.com/docker/notary" + store "github.com/docker/notary/storage" "github.com/docker/notary/trustmanager" + "github.com/docker/notary/utils" ) func getYubiStore(fileKeyStore trustmanager.KeyStore, ret notary.PassRetriever) (trustmanager.KeyStore, error) { return nil, errors.New("Not built with hardware support") } + +func getImporters(baseDir string, _ notary.PassRetriever) ([]utils.Importer, error) { + fileStore, err := store.NewPrivateKeyFileStorage(baseDir, notary.KeyExtension) + if err != nil { + return nil, err + } + return []utils.Importer{fileStore}, nil +} diff --git a/cmd/notary/keys_nonpkcs11_test.go b/cmd/notary/keys_nonpkcs11_test.go new file mode 100644 index 000000000..515551863 --- /dev/null +++ b/cmd/notary/keys_nonpkcs11_test.go @@ -0,0 +1,151 @@ +//+build !pkcs11 + +package main + +import ( + "encoding/pem" + "github.com/docker/notary" + "github.com/docker/notary/cryptoservice" + "github.com/docker/notary/passphrase" + store "github.com/docker/notary/storage" + "github.com/docker/notary/trustmanager" + "github.com/docker/notary/tuf/data" + "github.com/spf13/cobra" + "github.com/spf13/viper" + "github.com/stretchr/testify/require" + "io/ioutil" + "os" + "path/filepath" + "testing" +) + +func TestImportKeysNoYubikey(t *testing.T) { + setUp(t) + tempBaseDir, err := ioutil.TempDir("/tmp", "notary-test-") + 
require.NoError(t, err) + defer os.RemoveAll(tempBaseDir) + input, err := ioutil.TempFile("/tmp", "notary-test-import-") + require.NoError(t, err) + defer os.RemoveAll(input.Name()) + k := &keyCommander{ + configGetter: func() (*viper.Viper, error) { + v := viper.New() + v.SetDefault("trust_dir", tempBaseDir) + return v, nil + }, + getRetriever: func() notary.PassRetriever { return passphrase.ConstantRetriever("pass") }, + } + + memStore := store.NewMemoryStore(nil) + ks := trustmanager.NewGenericKeyStore(memStore, k.getRetriever()) + cs := cryptoservice.NewCryptoService(ks) + + pubK, err := cs.Create(data.CanonicalRootRole, "ankh", data.ECDSAKey) + require.NoError(t, err) + bytes, err := memStore.Get(notary.RootKeysSubdir + "/" + pubK.ID()) + require.NoError(t, err) + b, _ := pem.Decode(bytes) + b.Headers["path"] = "ankh" + + pubK, err = cs.Create(data.CanonicalTargetsRole, "morpork", data.ECDSAKey) + require.NoError(t, err) + bytes, err = memStore.Get(notary.NonRootKeysSubdir + "/morpork/" + pubK.ID()) + require.NoError(t, err) + c, _ := pem.Decode(bytes) + c.Headers["path"] = "morpork" + + bBytes := pem.EncodeToMemory(b) + cBytes := pem.EncodeToMemory(c) + input.Write(bBytes) + input.Write(cBytes) + + file := input.Name() + err = input.Close() // close so import can open + require.NoError(t, err) + + err = k.importKeys(&cobra.Command{}, []string{file}) + require.NoError(t, err) + + fileStore, err := store.NewPrivateKeyFileStorage(tempBaseDir, notary.KeyExtension) + bResult, err := fileStore.Get("ankh") + require.NoError(t, err) + cResult, err := fileStore.Get("morpork") + require.NoError(t, err) + + block, rest := pem.Decode(bResult) + require.Equal(t, b.Bytes, block.Bytes) + require.Len(t, rest, 0) + + block, rest = pem.Decode(cResult) + require.Equal(t, c.Bytes, block.Bytes) + require.Len(t, rest, 0) +} + +func TestExportImportKeysNoYubikey(t *testing.T) { + setUp(t) + exportTempDir, err := ioutil.TempDir("/tmp", "notary-test-") + require.NoError(t, err) + 
defer os.RemoveAll(exportTempDir) + tempfile, err := ioutil.TempFile("/tmp", "notary-test-import-") + require.NoError(t, err) + tempfile.Close() + defer os.RemoveAll(tempfile.Name()) + exportCommander := &keyCommander{ + configGetter: func() (*viper.Viper, error) { + v := viper.New() + v.SetDefault("trust_dir", exportTempDir) + return v, nil + }, + getRetriever: func() notary.PassRetriever { return passphrase.ConstantRetriever("pass") }, + } + exportCommander.outFile = tempfile.Name() + + exportStore, err := store.NewPrivateKeyFileStorage(exportTempDir, notary.KeyExtension) + ks := trustmanager.NewGenericKeyStore(exportStore, exportCommander.getRetriever()) + cs := cryptoservice.NewCryptoService(ks) + + pubK, err := cs.Create(data.CanonicalRootRole, "ankh", data.ECDSAKey) + require.NoError(t, err) + bID := pubK.ID() + bOrigBytes, err := exportStore.Get(filepath.Join(notary.RootKeysSubdir, bID)) + require.NoError(t, err) + bOrig, _ := pem.Decode(bOrigBytes) + + pubK, err = cs.Create(data.CanonicalTargetsRole, "morpork", data.ECDSAKey) + require.NoError(t, err) + cID := pubK.ID() + cOrigBytes, err := exportStore.Get(filepath.Join(notary.NonRootKeysSubdir, "morpork", cID)) + require.NoError(t, err) + cOrig, _ := pem.Decode(cOrigBytes) + + exportCommander.exportKeys(&cobra.Command{}, nil) + + importTempDir, err := ioutil.TempDir("/tmp", "notary-test-") + require.NoError(t, err) + defer os.RemoveAll(importTempDir) + importCommander := &keyCommander{ + configGetter: func() (*viper.Viper, error) { + v := viper.New() + v.SetDefault("trust_dir", importTempDir) + return v, nil + }, + getRetriever: func() notary.PassRetriever { return passphrase.ConstantRetriever("pass") }, + } + + err = importCommander.importKeys(&cobra.Command{}, []string{tempfile.Name()}) + require.NoError(t, err) + + importStore, err := store.NewPrivateKeyFileStorage(importTempDir, notary.KeyExtension) + bResult, err := importStore.Get(filepath.Join(notary.RootKeysSubdir, bID)) + require.NoError(t, err) + 
cResult, err := importStore.Get(filepath.Join(notary.NonRootKeysSubdir, "morpork", cID)) + require.NoError(t, err) + + block, rest := pem.Decode(bResult) + require.Equal(t, bOrig.Bytes, block.Bytes) + require.Len(t, rest, 0) + + block, rest = pem.Decode(cResult) + require.Equal(t, cOrig.Bytes, block.Bytes) + require.Len(t, rest, 0) +} diff --git a/cmd/notary/keys_pkcs11.go b/cmd/notary/keys_pkcs11.go index 88bdcc359..b47234558 100644 --- a/cmd/notary/keys_pkcs11.go +++ b/cmd/notary/keys_pkcs11.go @@ -4,10 +4,36 @@ package main import ( "github.com/docker/notary" + store "github.com/docker/notary/storage" "github.com/docker/notary/trustmanager" "github.com/docker/notary/trustmanager/yubikey" + "github.com/docker/notary/utils" ) -func getYubiStore(fileKeyStore trustmanager.KeyStore, ret notary.PassRetriever) (trustmanager.KeyStore, error) { +func getYubiStore(fileKeyStore trustmanager.KeyStore, ret notary.PassRetriever) (*yubikey.YubiStore, error) { return yubikey.NewYubiStore(fileKeyStore, ret) } + +func getImporters(baseDir string, ret notary.PassRetriever) ([]utils.Importer, error) { + + var importers []utils.Importer + if yubikey.IsAccessible() { + yubiStore, err := getYubiStore(nil, ret) + if err == nil { + importers = append( + importers, + yubikey.NewImporter(yubiStore, ret), + ) + } + } + fileStore, err := store.NewPrivateKeyFileStorage(baseDir, notary.KeyExtension) + if err == nil { + importers = append( + importers, + fileStore, + ) + } else if len(importers) == 0 { + return nil, err // couldn't initialize any stores + } + return importers, nil +} diff --git a/cmd/notary/keys_pkcs11_test.go b/cmd/notary/keys_pkcs11_test.go new file mode 100644 index 000000000..c6b4e1aad --- /dev/null +++ b/cmd/notary/keys_pkcs11_test.go @@ -0,0 +1,107 @@ +// +build pkcs11 + +package main + +import ( + "encoding/pem" + "io/ioutil" + "os" + "testing" + + "github.com/spf13/cobra" + "github.com/spf13/viper" + "github.com/stretchr/testify/require" + + 
"github.com/docker/notary" + "github.com/docker/notary/cryptoservice" + "github.com/docker/notary/passphrase" + store "github.com/docker/notary/storage" + "github.com/docker/notary/trustmanager" + "github.com/docker/notary/trustmanager/yubikey" + "github.com/docker/notary/tuf/data" +) + +func TestImportWithYubikey(t *testing.T) { + if !yubikey.IsAccessible() { + t.Skip("Must have Yubikey access.") + } + setUp(t) + tempBaseDir, err := ioutil.TempDir("/tmp", "notary-test-") + require.NoError(t, err) + defer os.RemoveAll(tempBaseDir) + input, err := ioutil.TempFile("/tmp", "notary-test-import-") + require.NoError(t, err) + defer os.RemoveAll(input.Name()) + k := &keyCommander{ + configGetter: func() (*viper.Viper, error) { + v := viper.New() + v.SetDefault("trust_dir", tempBaseDir) + return v, nil + }, + getRetriever: func() notary.PassRetriever { return passphrase.ConstantRetriever("pass") }, + } + + memStore := store.NewMemoryStore(nil) + ks := trustmanager.NewGenericKeyStore(memStore, k.getRetriever()) + cs := cryptoservice.NewCryptoService(ks) + + pubK, err := cs.Create(data.CanonicalRootRole, "ankh", data.ECDSAKey) + require.NoError(t, err) + bID := pubK.ID() // need to check presence in yubikey later + require.NoError(t, err) + bytes, err := memStore.Get(notary.RootKeysSubdir + "/" + pubK.ID()) + require.NoError(t, err) + b, _ := pem.Decode(bytes) + b.Headers["path"] = "ankh" + require.Equal(t, "root", b.Headers["role"]) + + pubK, err = cs.Create(data.CanonicalTargetsRole, "morpork", data.ECDSAKey) + require.NoError(t, err) + cID := pubK.ID() + bytes, err = memStore.Get(notary.NonRootKeysSubdir + "/morpork/" + pubK.ID()) + require.NoError(t, err) + c, _ := pem.Decode(bytes) + c.Headers["path"] = "morpork" + + bBytes := pem.EncodeToMemory(b) + cBytes := pem.EncodeToMemory(c) + input.Write(bBytes) + input.Write(cBytes) + + file := input.Name() + err = input.Close() // close so import can open + require.NoError(t, err) + + err = 
k.importKeys(&cobra.Command{}, []string{file}) + require.NoError(t, err) + + yks, err := yubikey.NewYubiStore(nil, k.getRetriever()) + require.NoError(t, err) + _, _, err = yks.GetKey(bID) + require.NoError(t, err) + _, _, err = yks.GetKey(cID) + require.Error(t, err) // c is non-root, should not be in yubikey + + fileStore, err := store.NewPrivateKeyFileStorage(tempBaseDir, notary.KeyExtension) + _, err = fileStore.Get("ankh") + require.Error(t, err) // b should only be in yubikey, not in filestore + + cResult, err := fileStore.Get("morpork") + require.NoError(t, err) + + block, rest := pem.Decode(cResult) + require.Equal(t, c.Bytes, block.Bytes) + require.Len(t, rest, 0) +} + +func TestGetImporters(t *testing.T) { + if !yubikey.IsAccessible() { + t.Skip("Must have Yubikey access.") + } + tempBaseDir, err := ioutil.TempDir("/tmp", "notary-test-") + require.NoError(t, err) + defer os.RemoveAll(tempBaseDir) + importers, err := getImporters(tempBaseDir, passphrase.ConstantRetriever("pass")) + require.NoError(t, err) + require.Len(t, importers, 2) +} diff --git a/cmd/notary/keys_test.go b/cmd/notary/keys_test.go index cde7209fd..61719c293 100644 --- a/cmd/notary/keys_test.go +++ b/cmd/notary/keys_test.go @@ -3,6 +3,7 @@ package main import ( "bytes" "crypto/rand" + "encoding/pem" "fmt" "io/ioutil" "net/http" @@ -11,23 +12,25 @@ import ( "strings" "testing" - "golang.org/x/net/context" - "github.com/Sirupsen/logrus" ctxu "github.com/docker/distribution/context" + "github.com/spf13/cobra" + "github.com/spf13/viper" + "github.com/stretchr/testify/require" + "golang.org/x/net/context" + "github.com/docker/notary" "github.com/docker/notary/client" "github.com/docker/notary/cryptoservice" "github.com/docker/notary/passphrase" "github.com/docker/notary/server" "github.com/docker/notary/server/storage" + store "github.com/docker/notary/storage" "github.com/docker/notary/trustmanager" "github.com/docker/notary/trustpinning" 
"github.com/docker/notary/tuf/data" "github.com/docker/notary/tuf/utils" - "github.com/spf13/cobra" - "github.com/spf13/viper" - "github.com/stretchr/testify/require" + "path/filepath" ) var ret = passphrase.ConstantRetriever("pass") @@ -534,6 +537,239 @@ func TestChangeKeyPassphraseNonexistentID(t *testing.T) { require.Contains(t, err.Error(), "could not retrieve local key for key ID provided") } +func TestExportKeys(t *testing.T) { + setUp(t) + tempBaseDir, err := ioutil.TempDir("/tmp", "notary-test-") + require.NoError(t, err) + defer os.RemoveAll(tempBaseDir) + output, err := ioutil.TempFile("/tmp", "notary-test-import-") + require.NoError(t, err) + defer os.RemoveAll(output.Name()) + k := &keyCommander{ + configGetter: func() (*viper.Viper, error) { + v := viper.New() + v.SetDefault("trust_dir", tempBaseDir) + return v, nil + }, + } + k.outFile = output.Name() + err = output.Close() // close so export can open + require.NoError(t, err) + + b := &pem.Block{} + b.Bytes = make([]byte, 1000) + rand.Read(b.Bytes) + + c := &pem.Block{} + c.Bytes = make([]byte, 1000) + rand.Read(c.Bytes) + + bBytes := pem.EncodeToMemory(b) + cBytes := pem.EncodeToMemory(c) + require.NoError(t, err) + + fileStore, err := store.NewPrivateKeyFileStorage(tempBaseDir, notary.KeyExtension) + err = fileStore.Set(filepath.Join(notary.NonRootKeysSubdir, "discworld/ankh"), bBytes) + require.NoError(t, err) + err = fileStore.Set(filepath.Join(notary.NonRootKeysSubdir, "discworld/morpork"), cBytes) + require.NoError(t, err) + + err = k.exportKeys(&cobra.Command{}, nil) + require.NoError(t, err) + + outRes, err := ioutil.ReadFile(k.outFile) + require.NoError(t, err) + + block, rest := pem.Decode(outRes) + require.Equal(t, b.Bytes, block.Bytes) + require.Equal(t, filepath.Join(notary.NonRootKeysSubdir, "discworld/ankh"), block.Headers["path"]) + require.Equal(t, "discworld", block.Headers["gun"]) + + block, rest = pem.Decode(rest) + require.Equal(t, c.Bytes, block.Bytes) + 
require.Equal(t, filepath.Join(notary.NonRootKeysSubdir, "discworld/morpork"), block.Headers["path"]) + require.Equal(t, "discworld", block.Headers["gun"]) + require.Len(t, rest, 0) + + // test no outFile uses stdout (or our replace buffer) + k.outFile = "" + cmd := &cobra.Command{} + out := bytes.NewBuffer(make([]byte, 0, 3000)) + cmd.SetOutput(out) + err = k.exportKeys(cmd, nil) + require.NoError(t, err) + + bufOut, err := ioutil.ReadAll(out) + require.NoError(t, err) + require.Equal(t, outRes, bufOut) // should be identical output to file earlier +} + +func TestExportKeysByGUN(t *testing.T) { + setUp(t) + tempBaseDir, err := ioutil.TempDir("/tmp", "notary-test-") + require.NoError(t, err) + defer os.RemoveAll(tempBaseDir) + output, err := ioutil.TempFile("/tmp", "notary-test-import-") + require.NoError(t, err) + defer os.RemoveAll(output.Name()) + k := &keyCommander{ + configGetter: func() (*viper.Viper, error) { + v := viper.New() + v.SetDefault("trust_dir", tempBaseDir) + return v, nil + }, + } + k.outFile = output.Name() + err = output.Close() // close so export can open + require.NoError(t, err) + k.exportGUNs = []string{"ankh"} + + b := &pem.Block{} + b.Bytes = make([]byte, 1000) + rand.Read(b.Bytes) + + b2 := &pem.Block{} + b2.Bytes = make([]byte, 1000) + rand.Read(b2.Bytes) + + c := &pem.Block{} + c.Bytes = make([]byte, 1000) + rand.Read(c.Bytes) + + bBytes := pem.EncodeToMemory(b) + b2Bytes := pem.EncodeToMemory(b2) + cBytes := pem.EncodeToMemory(c) + require.NoError(t, err) + + fileStore, err := store.NewPrivateKeyFileStorage(tempBaseDir, notary.KeyExtension) + // we have to manually prepend the NonRootKeysSubdir because + // KeyStore would be expected to do this for us. 
+ err = fileStore.Set( + filepath.Join(notary.NonRootKeysSubdir, "ankh/one"), + bBytes, + ) + require.NoError(t, err) + err = fileStore.Set( + filepath.Join(notary.NonRootKeysSubdir, "ankh/two"), + b2Bytes, + ) + require.NoError(t, err) + err = fileStore.Set( + filepath.Join(notary.NonRootKeysSubdir, "morpork/three"), + cBytes, + ) + require.NoError(t, err) + + err = k.exportKeys(&cobra.Command{}, nil) + require.NoError(t, err) + + outRes, err := ioutil.ReadFile(k.outFile) + require.NoError(t, err) + + block, rest := pem.Decode(outRes) + require.Equal(t, b.Bytes, block.Bytes) + require.Equal( + t, + filepath.Join(notary.NonRootKeysSubdir, "ankh/one"), + block.Headers["path"], + ) + + block, rest = pem.Decode(rest) + require.Equal(t, b2.Bytes, block.Bytes) + require.Equal( + t, + filepath.Join(notary.NonRootKeysSubdir, "ankh/two"), + block.Headers["path"], + ) + require.Len(t, rest, 0) +} + +func TestExportKeysByID(t *testing.T) { + setUp(t) + tempBaseDir, err := ioutil.TempDir("/tmp", "notary-test-") + require.NoError(t, err) + defer os.RemoveAll(tempBaseDir) + output, err := ioutil.TempFile("/tmp", "notary-test-import-") + require.NoError(t, err) + defer os.RemoveAll(output.Name()) + k := &keyCommander{ + configGetter: func() (*viper.Viper, error) { + v := viper.New() + v.SetDefault("trust_dir", tempBaseDir) + return v, nil + }, + } + k.outFile = output.Name() + err = output.Close() // close so export can open + require.NoError(t, err) + k.exportKeyIDs = []string{"one", "three"} + + b := &pem.Block{} + b.Bytes = make([]byte, 1000) + rand.Read(b.Bytes) + + b2 := &pem.Block{} + b2.Bytes = make([]byte, 1000) + rand.Read(b2.Bytes) + + c := &pem.Block{} + c.Bytes = make([]byte, 1000) + rand.Read(c.Bytes) + + bBytes := pem.EncodeToMemory(b) + b2Bytes := pem.EncodeToMemory(b2) + cBytes := pem.EncodeToMemory(c) + require.NoError(t, err) + + fileStore, err := store.NewPrivateKeyFileStorage(tempBaseDir, notary.KeyExtension) + err = fileStore.Set("ankh/one", bBytes) + 
require.NoError(t, err) + err = fileStore.Set("ankh/two", b2Bytes) + require.NoError(t, err) + err = fileStore.Set("morpork/three", cBytes) + require.NoError(t, err) + + err = k.exportKeys(&cobra.Command{}, nil) + require.NoError(t, err) + + outRes, err := ioutil.ReadFile(k.outFile) + require.NoError(t, err) + + block, rest := pem.Decode(outRes) + require.Equal(t, b.Bytes, block.Bytes) + require.Equal(t, "ankh/one", block.Headers["path"]) + + block, rest = pem.Decode(rest) + require.Equal(t, c.Bytes, block.Bytes) + require.Equal(t, "morpork/three", block.Headers["path"]) + require.Len(t, rest, 0) +} + +func TestExportKeysBadFlagCombo(t *testing.T) { + setUp(t) + tempBaseDir, err := ioutil.TempDir("/tmp", "notary-test-") + require.NoError(t, err) + defer os.RemoveAll(tempBaseDir) + output, err := ioutil.TempFile("/tmp", "notary-test-import-") + require.NoError(t, err) + defer os.RemoveAll(output.Name()) + k := &keyCommander{ + configGetter: func() (*viper.Viper, error) { + v := viper.New() + v.SetDefault("trust_dir", tempBaseDir) + return v, nil + }, + } + k.outFile = output.Name() + err = output.Close() // close so export can open + require.NoError(t, err) + k.exportGUNs = []string{"ankh"} + k.exportKeyIDs = []string{"one", "three"} + + err = k.exportKeys(&cobra.Command{}, nil) + require.Error(t, err) +} + func generateTempTestKeyFile(t *testing.T, role string) string { setUp(t) privKey, err := utils.GenerateECDSAKey(rand.Reader) diff --git a/cmd/notary/main_test.go b/cmd/notary/main_test.go index 40e3793b5..82683554e 100644 --- a/cmd/notary/main_test.go +++ b/cmd/notary/main_test.go @@ -161,6 +161,7 @@ var exampleValidCommands = []string{ "key generate rsa", "key remove e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "key passwd e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "key import backup.pem", "delegation list repo", "delegation add repo targets/releases path/to/pem/file.pem", "delegation remove repo 
targets/releases", diff --git a/const.go b/const.go index c6d136301..3a0a01cd8 100644 --- a/const.go +++ b/const.go @@ -36,6 +36,8 @@ const ( RootKeysSubdir = "root_keys" // NonRootKeysSubdir is the subdirectory under PrivDir where non-root private keys are stored NonRootKeysSubdir = "tuf_keys" + // KeyExtension is the file extension to use for private key files + KeyExtension = "key" // Day is a duration of one day Day = 24 * time.Hour diff --git a/storage/filestore.go b/storage/filestore.go index b99079bcc..8d2cf1c98 100644 --- a/storage/filestore.go +++ b/storage/filestore.go @@ -41,6 +41,13 @@ func NewSimpleFileStore(baseDir, fileExt string) (*FilesystemStore, error) { return NewFileStore(baseDir, fileExt, notary.PubCertPerms) } +// NewPrivateKeyFileStorage initializes a new filestore for private keys, appending +// the notary.PrivDir to the baseDir. +func NewPrivateKeyFileStorage(baseDir, fileExt string) (*FilesystemStore, error) { + baseDir = filepath.Join(baseDir, notary.PrivDir) + return NewFileStore(baseDir, fileExt, notary.PrivKeyPerms) +} + // NewPrivateSimpleFileStore is a wrapper to create an owner readable/writeable // _only_ filestore func NewPrivateSimpleFileStore(baseDir, fileExt string) (*FilesystemStore, error) { diff --git a/storage/httpstore_test.go b/storage/httpstore_test.go index 3af684cff..359fe82f2 100644 --- a/storage/httpstore_test.go +++ b/storage/httpstore_test.go @@ -48,7 +48,7 @@ func TestHTTPStoreGetSized(t *testing.T) { require.NoError(t, err) } -// Test that passing -1 to httpstore's GetMeta will return all content +// Test that passing -1 to httpstore's GetSized will return all content func TestHTTPStoreGetAllMeta(t *testing.T) { handler := func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(testRoot)) diff --git a/trustmanager/interfaces.go b/trustmanager/interfaces.go index 2611d436a..34bc128d2 100644 --- a/trustmanager/interfaces.go +++ b/trustmanager/interfaces.go @@ -62,10 +62,6 @@ func (err ErrKeyNotFound) Error() 
string { return fmt.Sprintf("signing key not found: %s", err.KeyID) } -const ( - keyExtension = "key" -) - // KeyStore is a generic interface for private key storage type KeyStore interface { // AddKey adds a key to the KeyStore, and if the key already exists, diff --git a/trustmanager/keystore.go b/trustmanager/keystore.go index c57d28f44..dca066769 100644 --- a/trustmanager/keystore.go +++ b/trustmanager/keystore.go @@ -36,8 +36,7 @@ type GenericKeyStore struct { // NewKeyFileStore returns a new KeyFileStore creating a private directory to // hold the keys. func NewKeyFileStore(baseDir string, p notary.PassRetriever) (*GenericKeyStore, error) { - baseDir = filepath.Join(baseDir, notary.PrivDir) - fileStore, err := store.NewPrivateSimpleFileStore(baseDir, keyExtension) + fileStore, err := store.NewPrivateKeyFileStorage(baseDir, notary.KeyExtension) if err != nil { return nil, err } diff --git a/trustmanager/yubikey/import.go b/trustmanager/yubikey/import.go new file mode 100644 index 000000000..08048ef52 --- /dev/null +++ b/trustmanager/yubikey/import.go @@ -0,0 +1,57 @@ +// +build pkcs11 + +package yubikey + +import ( + "encoding/pem" + "errors" + "github.com/docker/notary" + "github.com/docker/notary/trustmanager" + "github.com/docker/notary/tuf/utils" +) + +// YubiImport is a wrapper around the YubiStore that allows us to import private +// keys to the yubikey +type YubiImport struct { + dest *YubiStore + passRetriever notary.PassRetriever +} + +// NewImporter returns a wrapper for the YubiStore provided that enables importing +// keys via the simple Set(string, []byte) interface +func NewImporter(ys *YubiStore, ret notary.PassRetriever) *YubiImport { + return &YubiImport{ + dest: ys, + passRetriever: ret, + } +} + +// Set determines if we are allowed to set the given key on the Yubikey and +// calls through to YubiStore.AddKey if it's valid +func (s *YubiImport) Set(name string, bytes []byte) error { + block, _ := pem.Decode(bytes) + if block == nil { 
+ return errors.New("invalid PEM data, could not parse") + } + role, ok := block.Headers["role"] + if !ok { + return errors.New("no role found for key") + } + ki := trustmanager.KeyInfo{ + // GUN is ignored by YubiStore + Role: role, + } + privKey, err := utils.ParsePEMPrivateKey(bytes, "") + if err != nil { + privKey, _, err = trustmanager.GetPasswdDecryptBytes( + s.passRetriever, + bytes, + name, + ki.Role, + ) + if err != nil { + return err + } + } + return s.dest.AddKey(ki, privKey) +} diff --git a/tuf/utils/utils.go b/tuf/utils/utils.go index 6cf499c47..8de72b679 100644 --- a/tuf/utils/utils.go +++ b/tuf/utils/utils.go @@ -5,7 +5,6 @@ import ( "crypto/sha512" "crypto/tls" "encoding/hex" - "errors" "fmt" "io" "net/http" @@ -151,37 +150,3 @@ func ConsistentName(role string, hashSha256 []byte) string { } return role } - -// ErrWrongLength indicates the length was different to that expected -var ErrWrongLength = errors.New("wrong length") - -// ErrWrongHash indicates the hash was different to that expected -type ErrWrongHash struct { - Type string - Expected []byte - Actual []byte -} - -// Error implements error interface -func (e ErrWrongHash) Error() string { - return fmt.Sprintf("wrong %s hash, expected %#x got %#x", e.Type, e.Expected, e.Actual) -} - -// ErrNoCommonHash indicates the metadata did not provide any hashes this -// client recognizes -type ErrNoCommonHash struct { - Expected data.Hashes - Actual data.Hashes -} - -// Error implements error interface -func (e ErrNoCommonHash) Error() string { - types := func(a data.Hashes) []string { - t := make([]string, 0, len(a)) - for typ := range a { - t = append(t, typ) - } - return t - } - return fmt.Sprintf("no common hash function, expected one of %s, got %s", types(e.Expected), types(e.Actual)) -} diff --git a/utils/keys.go b/utils/keys.go new file mode 100644 index 000000000..35fe4478d --- /dev/null +++ b/utils/keys.go @@ -0,0 +1,143 @@ +package utils + +import ( + "encoding/pem" + "fmt" + 
"github.com/Sirupsen/logrus" + "github.com/docker/notary" + "io" + "io/ioutil" + "path/filepath" + "sort" + "strings" +) + +// Exporter is a simple interface for the two functions we need from the Storage interface +type Exporter interface { + Get(string) ([]byte, error) + ListFiles() []string +} + +// Importer is a simple interface for the one function we need from the Storage interface +type Importer interface { + Set(string, []byte) error +} + +// ExportKeysByGUN exports all keys filtered to a GUN +func ExportKeysByGUN(to io.Writer, s Exporter, gun string) error { + keys := s.ListFiles() + sort.Strings(keys) // ensure consistenct. ListFiles has no order guarantee + for _, k := range keys { + dir := filepath.Dir(k) + if dir == gun { // must be full GUN match + if err := ExportKeys(to, s, k); err != nil { + return err + } + } + } + return nil +} + +// ExportKeysByID exports all keys matching the given ID +func ExportKeysByID(to io.Writer, s Exporter, ids []string) error { + want := make(map[string]struct{}) + for _, id := range ids { + want[id] = struct{}{} + } + keys := s.ListFiles() + for _, k := range keys { + id := filepath.Base(k) + if _, ok := want[id]; ok { + if err := ExportKeys(to, s, k); err != nil { + return err + } + } + } + return nil +} + +// ExportKeys copies a key from the store to the io.Writer +func ExportKeys(to io.Writer, s Exporter, from string) error { + // get PEM block + k, err := s.Get(from) + if err != nil { + return err + } + + gun := "" + if strings.HasPrefix(from, notary.NonRootKeysSubdir) { + // trim subdir + gun = strings.TrimPrefix(from, notary.NonRootKeysSubdir) + // trim filename + gun = filepath.Dir(gun) + // trim leading and trailing path separator + gun = strings.Trim(gun, fmt.Sprintf("%c", filepath.Separator)) + } + // parse PEM blocks if there are more than one + for block, rest := pem.Decode(k); block != nil; block, rest = pem.Decode(rest) { + // add from path in a header for later import + block.Headers["path"] = from 
+ block.Headers["gun"] = gun + // write serialized PEM + err = pem.Encode(to, block) + if err != nil { + return err + } + } + return nil +} + +// ImportKeys expects an io.Reader containing one or more PEM blocks. +// It reads PEM blocks one at a time until pem.Decode returns a nil +// block. +// Each block is written to the subpath indicated in the "path" PEM +// header. If the file already exists, the file is truncated. Multiple +// adjacent PEMs with the same "path" header are appended together. +func ImportKeys(from io.Reader, to []Importer) error { + data, err := ioutil.ReadAll(from) + if err != nil { + return err + } + var ( + writeTo string + toWrite []byte + ) + for block, rest := pem.Decode(data); block != nil; block, rest = pem.Decode(rest) { + loc, ok := block.Headers["path"] + if !ok || loc == "" { + logrus.Info("failed to import key to store: PEM headers did not contain import path") + continue // don't know where to copy this key. Skip it. + } + if loc != writeTo { + // next location is different from previous one. We've finished aggregating + // data for the previous file. If we have data, write the previous file, + // the clear toWrite and set writeTo to the next path we're going to write + if toWrite != nil { + if err = importToStores(to, writeTo, toWrite); err != nil { + return err + } + } + // set up for aggregating next file's data + toWrite = nil + writeTo = loc + } + delete(block.Headers, "path") + toWrite = append(toWrite, pem.EncodeToMemory(block)...) 
+ } + if toWrite != nil { // close out final iteration if there's data left + return importToStores(to, writeTo, toWrite) + } + return nil +} + +func importToStores(to []Importer, path string, bytes []byte) error { + var err error + for _, i := range to { + if err = i.Set(path, bytes); err != nil { + logrus.Errorf("failed to import key to store: %s", err.Error()) + continue + } + break + } + return err +} diff --git a/utils/keys_test.go b/utils/keys_test.go new file mode 100644 index 000000000..4e6a979e6 --- /dev/null +++ b/utils/keys_test.go @@ -0,0 +1,317 @@ +package utils + +import ( + "bytes" + "crypto/rand" + "encoding/pem" + "errors" + "github.com/stretchr/testify/require" + "io/ioutil" + "testing" +) + +type TestImportStore struct { + data map[string][]byte +} + +func NewTestImportStore() *TestImportStore { + return &TestImportStore{ + data: make(map[string][]byte), + } +} + +func (s *TestImportStore) Set(name string, data []byte) error { + s.data[name] = data + return nil +} + +type TestExportStore struct { + data map[string][]byte +} + +func NewTestExportStore() *TestExportStore { + return &TestExportStore{ + data: make(map[string][]byte), + } +} + +func (s *TestExportStore) Get(name string) ([]byte, error) { + if data, ok := s.data[name]; ok { + return data, nil + } + return nil, errors.New("Not Found") +} + +func (s *TestExportStore) ListFiles() []string { + files := make([]string, 0, len(s.data)) + for k := range s.data { + files = append(files, k) + } + return files +} + +func TestExportKeys(t *testing.T) { + s := NewTestExportStore() + + b := &pem.Block{} + b.Bytes = make([]byte, 1000) + rand.Read(b.Bytes) + + c := &pem.Block{} + c.Bytes = make([]byte, 1000) + rand.Read(c.Bytes) + + bBytes := pem.EncodeToMemory(b) + cBytes := pem.EncodeToMemory(c) + + s.data["ankh"] = bBytes + s.data["morpork"] = cBytes + + buf := bytes.NewBuffer(nil) + + err := ExportKeys(buf, s, "ankh") + require.NoError(t, err) + + err = ExportKeys(buf, s, "morpork") + 
require.NoError(t, err) + + out, err := ioutil.ReadAll(buf) + require.NoError(t, err) + + bFinal, rest := pem.Decode(out) + require.Equal(t, b.Bytes, bFinal.Bytes) + require.Equal(t, "ankh", bFinal.Headers["path"]) + + cFinal, rest := pem.Decode(rest) + require.Equal(t, c.Bytes, cFinal.Bytes) + require.Equal(t, "morpork", cFinal.Headers["path"]) + require.Len(t, rest, 0) +} + +func TestExportKeysByGUN(t *testing.T) { + s := NewTestExportStore() + + b := &pem.Block{} + b.Bytes = make([]byte, 1000) + rand.Read(b.Bytes) + + b2 := &pem.Block{} + b2.Bytes = make([]byte, 1000) + rand.Read(b2.Bytes) + + c := &pem.Block{} + c.Bytes = make([]byte, 1000) + rand.Read(c.Bytes) + + bBytes := pem.EncodeToMemory(b) + b2Bytes := pem.EncodeToMemory(b2) + cBytes := pem.EncodeToMemory(c) + + s.data["ankh/one"] = bBytes + s.data["ankh/two"] = b2Bytes + s.data["morpork/three"] = cBytes + + buf := bytes.NewBuffer(nil) + + err := ExportKeysByGUN(buf, s, "ankh") + require.NoError(t, err) + + out, err := ioutil.ReadAll(buf) + require.NoError(t, err) + + bFinal, rest := pem.Decode(out) + require.Equal(t, b.Bytes, bFinal.Bytes) + require.Equal(t, "ankh/one", bFinal.Headers["path"]) + + b2Final, rest := pem.Decode(rest) + require.Equal(t, b2.Bytes, b2Final.Bytes) + require.Equal(t, "ankh/two", b2Final.Headers["path"]) + require.Len(t, rest, 0) +} + +func TestExportKeysByID(t *testing.T) { + s := NewTestExportStore() + + b := &pem.Block{} + b.Bytes = make([]byte, 1000) + rand.Read(b.Bytes) + + c := &pem.Block{} + c.Bytes = make([]byte, 1000) + rand.Read(c.Bytes) + + bBytes := pem.EncodeToMemory(b) + cBytes := pem.EncodeToMemory(c) + + s.data["ankh"] = bBytes + s.data["morpork/identifier"] = cBytes + + buf := bytes.NewBuffer(nil) + + err := ExportKeysByID(buf, s, []string{"identifier"}) + require.NoError(t, err) + + out, err := ioutil.ReadAll(buf) + require.NoError(t, err) + + cFinal, rest := pem.Decode(out) + require.Equal(t, c.Bytes, cFinal.Bytes) + require.Equal(t, "morpork/identifier", 
cFinal.Headers["path"]) + require.Len(t, rest, 0) +} + +func TestExport2InOneFile(t *testing.T) { + s := NewTestExportStore() + + b := &pem.Block{} + b.Bytes = make([]byte, 1000) + rand.Read(b.Bytes) + + b2 := &pem.Block{} + b2.Bytes = make([]byte, 1000) + rand.Read(b2.Bytes) + + c := &pem.Block{} + c.Bytes = make([]byte, 1000) + rand.Read(c.Bytes) + + bBytes := pem.EncodeToMemory(b) + b2Bytes := pem.EncodeToMemory(b2) + bBytes = append(bBytes, b2Bytes...) + cBytes := pem.EncodeToMemory(c) + + s.data["ankh"] = bBytes + s.data["morpork"] = cBytes + + buf := bytes.NewBuffer(nil) + + err := ExportKeys(buf, s, "ankh") + require.NoError(t, err) + + err = ExportKeys(buf, s, "morpork") + require.NoError(t, err) + + out, err := ioutil.ReadAll(buf) + require.NoError(t, err) + + bFinal, rest := pem.Decode(out) + require.Equal(t, b.Bytes, bFinal.Bytes) + require.Equal(t, "ankh", bFinal.Headers["path"]) + + b2Final, rest := pem.Decode(rest) + require.Equal(t, b2.Bytes, b2Final.Bytes) + require.Equal(t, "ankh", b2Final.Headers["path"]) + + cFinal, rest := pem.Decode(rest) + require.Equal(t, c.Bytes, cFinal.Bytes) + require.Equal(t, "morpork", cFinal.Headers["path"]) + require.Len(t, rest, 0) +} + +func TestImportKeys(t *testing.T) { + s := NewTestImportStore() + + b := &pem.Block{ + Headers: make(map[string]string), + } + b.Bytes = make([]byte, 1000) + rand.Read(b.Bytes) + b.Headers["path"] = "ankh" + + c := &pem.Block{ + Headers: make(map[string]string), + } + c.Bytes = make([]byte, 1000) + rand.Read(c.Bytes) + c.Headers["path"] = "morpork" + + bBytes := pem.EncodeToMemory(b) + cBytes := pem.EncodeToMemory(c) + + byt := append(bBytes, cBytes...) 
+ + in := bytes.NewBuffer(byt) + + err := ImportKeys(in, []Importer{s}) + require.NoError(t, err) + + bFinal, bRest := pem.Decode(s.data["ankh"]) + require.Equal(t, b.Bytes, bFinal.Bytes) + require.Len(t, bFinal.Headers, 0) // path header is stripped during import + require.Len(t, bRest, 0) + + cFinal, cRest := pem.Decode(s.data["morpork"]) + require.Equal(t, c.Bytes, cFinal.Bytes) + require.Len(t, cFinal.Headers, 0) + require.Len(t, cRest, 0) +} + +func TestImportNoPath(t *testing.T) { + s := NewTestImportStore() + + b := &pem.Block{ + Headers: make(map[string]string), + } + b.Bytes = make([]byte, 1000) + rand.Read(b.Bytes) + + bBytes := pem.EncodeToMemory(b) + + in := bytes.NewBuffer(bBytes) + + err := ImportKeys(in, []Importer{s}) + require.NoError(t, err) + + require.Len(t, s.data, 0) +} + +func TestImportKeys2InOneFile(t *testing.T) { + s := NewTestImportStore() + + b := &pem.Block{ + Headers: make(map[string]string), + } + b.Bytes = make([]byte, 1000) + rand.Read(b.Bytes) + b.Headers["path"] = "ankh" + + b2 := &pem.Block{ + Headers: make(map[string]string), + } + b2.Bytes = make([]byte, 1000) + rand.Read(b2.Bytes) + b2.Headers["path"] = "ankh" + + c := &pem.Block{ + Headers: make(map[string]string), + } + c.Bytes = make([]byte, 1000) + rand.Read(c.Bytes) + c.Headers["path"] = "morpork" + + bBytes := pem.EncodeToMemory(b) + b2Bytes := pem.EncodeToMemory(b2) + bBytes = append(bBytes, b2Bytes...) + cBytes := pem.EncodeToMemory(c) + + byt := append(bBytes, cBytes...) 
+ + in := bytes.NewBuffer(byt) + + err := ImportKeys(in, []Importer{s}) + require.NoError(t, err) + + bFinal, bRest := pem.Decode(s.data["ankh"]) + require.Equal(t, b.Bytes, bFinal.Bytes) + require.Len(t, bFinal.Headers, 0) // path header is stripped during import + + b2Final, b2Rest := pem.Decode(bRest) + require.Equal(t, b2.Bytes, b2Final.Bytes) + require.Len(t, b2Final.Headers, 0) // path header is stripped during import + require.Len(t, b2Rest, 0) + + cFinal, cRest := pem.Decode(s.data["morpork"]) + require.Equal(t, c.Bytes, cFinal.Bytes) + require.Len(t, cFinal.Headers, 0) + require.Len(t, cRest, 0) +}