feat: minor improvements

- allow installing a package that is already locked
- clean unknown files from vendor/
- correctly handle checksums and locked versions (these were accidentally
  ignored before)
sh0rez 2019-10-16 20:38:00 +02:00
parent cc1d7ea3b8
commit 0588b89c07
3 changed files with 95 additions and 18 deletions

View file

@@ -19,6 +19,7 @@ import (
     "io/ioutil"
     "os"
     "path/filepath"
+    "reflect"
 
     "github.com/pkg/errors"
     kingpin "gopkg.in/alecthomas/kingpin.v2"
@@ -33,23 +34,30 @@ func installCommand(dir, jsonnetHome string, uris []string) int {
         dir = "."
     }
 
+    kingpin.FatalIfError(
+        os.MkdirAll(filepath.Join(dir, jsonnetHome, ".tmp"), os.ModePerm),
+        "creating vendor folder")
+
     jsonnetFile, err := jsonnetfile.Load(filepath.Join(dir, jsonnetfile.File))
     kingpin.FatalIfError(err, "failed to load jsonnetfile")
 
-    for _, u := range uris {
-        d := parseDependency(dir, u)
-        jsonnetFile.Dependencies[d.Name] = *d
-    }
-
     lockFile, err := jsonnetfile.Load(filepath.Join(dir, jsonnetfile.LockFile))
     if !os.IsNotExist(err) {
         kingpin.FatalIfError(err, "failed to load lockfile")
     }
 
-    kingpin.FatalIfError(
-        os.MkdirAll(filepath.Join(dir, jsonnetHome, ".tmp"), os.ModePerm),
-        "creating vendor folder")
+    for _, u := range uris {
+        d := parseDependency(dir, u)
+        if !depEqual(jsonnetFile.Dependencies[d.Name], *d) {
+            // the dep passed on the cli is different from the jsonnetFile
+            jsonnetFile.Dependencies[d.Name] = *d
+
+            // we want to install the passed version (ignore the lock)
+            delete(lockFile.Dependencies, d.Name)
+        }
+    }
 
     locked, err := pkg.Ensure(jsonnetFile, jsonnetHome, lockFile.Dependencies)
     kingpin.FatalIfError(err, "failed to install packages")
@@ -63,6 +71,14 @@ func installCommand(dir, jsonnetHome string, uris []string) int {
     return 0
 }
 
+func depEqual(d1, d2 spec.Dependency) bool {
+    name := d1.Name == d2.Name
+    version := d1.Version == d2.Version
+    source := reflect.DeepEqual(d1.Source, d2.Source)
+
+    return name && version && source
+}
+
 func writeJSONFile(name string, d interface{}) error {
     b, err := json.MarshalIndent(d, "", " ")
     if err != nil {
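The new depEqual helper compares the Source field with reflect.DeepEqual instead of ==. A minimal, self-contained sketch of why that matters, using hypothetical stand-ins for the spec types (the real spec.Source may be shaped differently):

package main

import (
    "fmt"
    "reflect"
)

// Hypothetical stand-ins for the spec types, only to illustrate the
// comparison used by depEqual.
type gitSource struct {
    Remote string
    Subdir string
}

type source struct {
    Git *gitSource // a pointer field makes == compare identity, not content
}

func main() {
    a := source{Git: &gitSource{Remote: "https://example.com/repo", Subdir: "lib"}}
    b := source{Git: &gitSource{Remote: "https://example.com/repo", Subdir: "lib"}}

    // a == b is false here: the two structs hold different pointers.
    fmt.Println(a == b)

    // reflect.DeepEqual follows the pointers and compares the pointed-to
    // values, so semantically equal sources compare as equal.
    fmt.Println(reflect.DeepEqual(a, b))
}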

View file

@@ -21,6 +21,7 @@ import (
     "path/filepath"
     "regexp"
 
+    "github.com/fatih/color"
     "github.com/pkg/errors"
     kingpin "gopkg.in/alecthomas/kingpin.v2"
@@ -54,6 +55,8 @@ func Main() int {
         JsonnetHome string
     }{}
 
+    color.Output = color.Error
+
     a := kingpin.New(filepath.Base(os.Args[0]), "A jsonnet package manager")
     a.HelpFlag.Short('h')
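Setting color.Output = color.Error redirects the package-level helpers of github.com/fatih/color (such as the color.Magenta call added in the next file) to stderr, presumably so that colored status messages do not mix with regular output on stdout. A minimal sketch of the effect; the path in the example is made up:

package main

import (
    "fmt"

    "github.com/fatih/color"
)

func main() {
    // After this assignment, package-level helpers such as color.Magenta
    // write to stderr instead of stdout.
    color.Output = color.Error

    // Colored status message, now on stderr.
    color.Magenta("CLEAN %s", "vendor/some-stale-package")

    // Stdout stays free for regular program output.
    fmt.Println("done")
}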

View file

@@ -23,6 +23,7 @@ import (
     "os"
     "path/filepath"
 
+    "github.com/fatih/color"
     "github.com/pkg/errors"
 
     "github.com/jsonnet-bundler/jsonnet-bundler/pkg/jsonnetfile"
@@ -33,18 +34,66 @@ var (
     VersionMismatch = errors.New("multiple colliding versions specified")
 )
 
-func Ensure(want spec.JsonnetFile, vendorDir string, locks map[string]spec.Dependency) (map[string]spec.Dependency, error) {
+// Ensure receives all direct packages, the directory to vendor into, and all known locks.
+// It then makes sure all direct and nested dependencies are present in vendor at the correct version:
+//
+// If the package is locked and the files in vendor match the sha256 checksum,
+// nothing needs to be done. Otherwise, the package is retrieved from the
+// upstream source and added into vendor. If previously locked, the sums are
+// checked as well.
+//
+// In case a (nested) package is already present in the lock,
+// the one from the lock takes precedence. This allows the user to set the
+// desired version by `jb install`ing it.
+//
+// Finally, all unknown files and directories are removed from vendor/.
+func Ensure(direct spec.JsonnetFile, vendorDir string, locks map[string]spec.Dependency) (map[string]spec.Dependency, error) {
+    // ensure all required files are in vendor
+    deps, err := ensure(direct.Dependencies, vendorDir, locks)
+    if err != nil {
+        return nil, err
+    }
+
+    // cleanup unknown dirs from vendor/
+    f, err := os.Open(vendorDir)
+    if err != nil {
+        return nil, err
+    }
+    names, err := f.Readdirnames(0)
+    if err != nil {
+        return nil, err
+    }
+
+    for _, name := range names {
+        if _, ok := deps[name]; !ok {
+            dir := filepath.Join(vendorDir, name)
+            if err := os.RemoveAll(dir); err != nil {
+                return nil, err
+            }
+            if name != ".tmp" {
+                color.Magenta("CLEAN %s", dir)
+            }
+        }
+    }
+
+    // return the final lockfile contents
+    return deps, nil
+}
+
+func ensure(direct map[string]spec.Dependency, vendorDir string, locks map[string]spec.Dependency) (map[string]spec.Dependency, error) {
     deps := make(map[string]spec.Dependency)
 
-    for _, d := range want.Dependencies {
+    for _, d := range direct {
         l, present := locks[d.Name]
 
         // already locked and the integrity is intact
-        if present && check(l, vendorDir) {
-            deps[d.Name] = l
-            continue
+        if present {
+            d.Version = locks[d.Name].Version
+            if check(l, vendorDir) {
+                deps[d.Name] = l
+                continue
+            }
         }
-        expectedSum := d.Sum
+        expectedSum := locks[d.Name].Sum
 
         // either not present or not intact: download again
         dir := filepath.Join(vendorDir, d.Name)
@@ -54,10 +103,12 @@ func Ensure(want spec.JsonnetFile, vendorDir string, locks map[string]spec.Depen
         if err != nil {
             return nil, errors.Wrap(err, "downloading")
         }
-        if expectedSum != "" && d.Sum != expectedSum {
-            return nil, fmt.Errorf("checksum mismatch for %s. Expected %s but got %s", d.Name, expectedSum, d.Sum)
+        if expectedSum != "" && locked.Sum != expectedSum {
+            return nil, fmt.Errorf("checksum mismatch for %s. Expected %s but got %s", d.Name, expectedSum, locked.Sum)
         }
         deps[d.Name] = *locked
+        // we settled on a new version, add it to the locks for recursion
+        locks[d.Name] = *locked
     }
 
     for _, d := range deps {
@@ -69,7 +120,7 @@ func Ensure(want spec.JsonnetFile, vendorDir string, locks map[string]spec.Depen
             return nil, err
         }
 
-        nested, err := Ensure(f, vendorDir, locks)
+        nested, err := ensure(f.Dependencies, vendorDir, locks)
         if err != nil {
             return nil, err
         }
@@ -84,6 +135,8 @@ func Ensure(want spec.JsonnetFile, vendorDir string, locks map[string]spec.Depen
     return deps, nil
 }
 
+// download retrieves a package from a remote upstream. The checksum of the
+// files is generated afterwards.
 func download(d spec.Dependency, vendorDir string) (*spec.Dependency, error) {
     var p Interface
     switch {
@@ -112,6 +165,8 @@ func download(d spec.Dependency, vendorDir string) (*spec.Dependency, error) {
     }, nil
 }
 
+// check returns whether the files present at the vendor/ folder match the
+// sha256 sum of the package
 func check(d spec.Dependency, vendorDir string) bool {
     if d.Sum == "" {
         // no sum available, need to download
@@ -123,6 +178,9 @@ func check(d spec.Dependency, vendorDir string) bool {
     return d.Sum == sum
 }
 
+// hashDir computes the checksum of a directory by concatenating all files and
+// hashing this data using sha256. This can be memory heavy with lots of data,
+// but jsonnet files should be fairly small
 func hashDir(dir string) string {
     hasher := sha256.New()
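The hashDir comment describes the approach, but the diff is cut off before the body. A minimal sketch of directory hashing along those lines, assuming a hex-encoded digest and streaming each file into the hasher rather than concatenating everything in memory (the real hashDir may differ on both points):

package main

import (
    "crypto/sha256"
    "encoding/hex"
    "fmt"
    "io"
    "os"
    "path/filepath"
)

// hashDirSketch walks dir, feeds every regular file into a single sha256
// hasher and returns the hex-encoded digest. filepath.Walk visits files in
// lexical order, which keeps the resulting sum deterministic.
func hashDirSketch(dir string) (string, error) {
    hasher := sha256.New()
    err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
        if err != nil {
            return err
        }
        if info.IsDir() {
            return nil
        }
        f, err := os.Open(path)
        if err != nil {
            return err
        }
        defer f.Close()
        // Stream the file contents into the hasher instead of reading the
        // whole file into memory.
        _, err = io.Copy(hasher, f)
        return err
    })
    if err != nil {
        return "", err
    }
    return hex.EncodeToString(hasher.Sum(nil)), nil
}

func main() {
    sum, err := hashDirSketch("vendor")
    if err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
    fmt.Println(sum)
}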