commit 8d5fb64edeea48893381ebe4a85058fb28849cc2
parent d61c1f8a3acd8e66fe7e8d2e81d86cc6367a9087
Author: gearsix <gearsix@tuta.io>
Date: Tue, 13 Apr 2021 11:18:08 +0100
major rework to data.go; made the API more in line with the std Go lib
- removed type Data = the user will want to use their own data type to
load data into
- LoadData now writes its output to an interface{} (pointer) passed as a
parameter (see the sketch below)
- LoadDataFile now takes an interface{} (pointer) to write output to
as well
- removed LoadDataFiles() = user responsibility
- removed GenerateSuperData() = superfluous
- removed MergeData() = superfluous
- updated data_test.go to reflect changes
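As a rough illustration of the new call pattern (not part of this commit): the snippet below assumes it lives in the same package as LoadData/LoadDataFile and that "strings" is in the import block; the Config type, its field tags and the "config.toml" path are made up for the example.

    type Config struct {
        Name string `json:"name" toml:"name" yaml:"name"`
    }

    // load a value from an in-memory reader, then from a file
    func exampleUsage() error {
        // LoadData: the caller now owns the destination type and passes a pointer to it
        var c Config
        if err := LoadData("json", strings.NewReader(`{"name": "example"}`), &c); err != nil {
            return err
        }

        // LoadDataFile: the data language is inferred from the file extension
        var m map[string]interface{}
        return LoadDataFile("config.toml", &m)
    }

    // with LoadDataFiles removed, iterating over and ordering multiple files is the caller's job
    func loadAll(paths ...string) ([]map[string]interface{}, error) {
        var all []map[string]interface{}
        for _, path := range paths {
            var d map[string]interface{}
            if err := LoadDataFile(path, &d); err != nil {
                return nil, err
            }
            all = append(all, d)
        }
        return all, nil
    }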
Diffstat:
 M data.go      | 231 ++++-----------------------------------------------------------------------------
 M data_test.go | 155 ++++++++++++++++---------------------------------------------------------------
2 files changed, 52 insertions(+), 334 deletions(-)
diff --git a/data.go b/data.go
@@ -26,232 +26,45 @@ import (
"io/ioutil"
"os"
"path/filepath"
- "sort"
- "strings"
- "time"
)
-// Data is the data type used to represent parsed Data (in any format).
-type Data map[string]interface{}
-
-func getDataType(path string) string {
- return strings.TrimPrefix(filepath.Ext(path), ".")
-}
-
-func loadGlobPaths(paths ...string) ([]string, error) {
- var err error
- var glob []string
- for p, path := range paths {
- if strings.Contains(path, "*") {
- if glob, err = filepath.Glob(path); err == nil {
- paths = append(paths, glob...)
- if len(glob) > 0 {
- paths = append(paths[:p], paths[p+1:]...)
- }
- }
- }
- }
- return paths, err
-}
-
-// LoadData reads all data from `in` and loads it in the format set in `lang`.
-func LoadData(lang string, in io.Reader) (d Data, e error) {
- var inbuf []byte
- if inbuf, e = ioutil.ReadAll(in); e != nil {
- return make(Data), e
+// LoadData attempts to load all data from `in` as the data language `lang`
+// and writes the result to the pointer `outp`.
+func LoadData(lang string, in io.Reader, outp interface{}) error {
+ inbuf, e := ioutil.ReadAll(in)
+ if e != nil {
+ return e
} else if len(inbuf) == 0 {
- return make(Data), nil
+ return nil
}
if lang == "json" {
- e = json.Unmarshal(inbuf, &d)
+ e = json.Unmarshal(inbuf, outp)
} else if lang == "yaml" {
- e = yaml.Unmarshal(inbuf, &d)
+ e = yaml.Unmarshal(inbuf, outp)
} else if lang == "toml" {
- e = toml.Unmarshal(inbuf, &d)
+ e = toml.Unmarshal(inbuf, outp)
} else {
e = fmt.Errorf("'%s' is not a supported data language", lang)
}
- return
+ return e
}
// LoadDataFile loads all the data from the file found at `path` into the
-// format of that files file extension (e.g. "x.json" will be loaded as a json).
-func LoadDataFile(path string) (d Data, e error) {
- var f *os.File
- if f, e = os.Open(path); e == nil {
- d, e = LoadData(getDataType(path), f)
- }
- f.Close()
- if e != nil {
- e = fmt.Errorf("failed to load data '%s':", e.Error())
- }
- return
-}
-
-// LoadDataFiles loads all files in `paths` recursively and sorted them in
-// `order`.
-func LoadDataFiles(order string, paths ...string) (data []Data, err error) {
- var stat os.FileInfo
-
- paths, err = loadGlobPaths(paths...)
-
- loaded := make(map[string]Data)
-
- for i := 0; i < len(paths) && err == nil; i++ {
- path := paths[i]
- if stat, err = os.Stat(path); err == nil {
- if stat.IsDir() {
- err = filepath.Walk(path,
- func(p string, fi os.FileInfo, e error) error {
- if e == nil && !fi.IsDir() {
- loaded[p], e = LoadDataFile(p)
- }
- return e
- })
- } else {
- loaded[path], err = LoadDataFile(path)
- }
- }
- }
-
- if err == nil {
- data, err = sortFileData(loaded, order)
- }
-
- return data, err
-}
-
-func sortFileData(data map[string]Data, order string) ([]Data, error) {
- var err error
- sorted := make([]Data, 0, len(data))
-
- if strings.HasPrefix(order, "filename") {
- if order == "filename-desc" {
- sorted = sortFileDataFilename("desc", data)
- } else if order == "filename-asc" {
- sorted = sortFileDataFilename("asc", data)
- } else {
- sorted = sortFileDataFilename("asc", data)
- }
- } else if strings.HasPrefix(order, "modified") {
- if order == "modified-desc" {
- sorted, err = sortFileDataModified("desc", data)
- } else if order == "modified-asc" {
- sorted, err = sortFileDataModified("asc", data)
- } else {
- sorted, err = sortFileDataModified("asc", data)
- }
- } else {
- for _, d := range data {
- sorted = append(sorted, d)
- }
- }
-
- return sorted, err
-}
-
-func sortFileDataFilename(direction string, data map[string]Data) []Data {
- sorted := make([]Data, 0, len(data))
-
- fnames := make([]string, 0, len(data))
- for fpath := range data {
- fnames = append(fnames, filepath.Base(fpath))
- }
- sort.Strings(fnames)
-
- if direction == "desc" {
- for i := len(fnames) - 1; i >= 0; i-- {
- for fpath, d := range data {
- if fnames[i] == filepath.Base(fpath) {
- sorted = append(sorted, d)
- }
- }
- }
- } else {
- for _, fname := range fnames {
- for fpath, d := range data {
- if fname == filepath.Base(fpath) {
- sorted = append(sorted, d)
- }
- }
+// format of that file's extension (e.g. "x.json" will be loaded as JSON).
+// The result is written to the value pointed at by `outp`.
+func LoadDataFile(path string, outp interface{}) error {
+ f, e := os.Open(path)
+ defer f.Close()
+
+ if e == nil {
+ lang := filepath.Ext(path)[1:] // don't include '.'
+ if e = LoadData(lang, f, outp); e != nil {
+ e = fmt.Errorf("failed to load data: %s", e.Error())
}
}
- return sorted
+ return e
}
-func sortFileDataModified(direction string, data map[string]Data) ([]Data, error) {
- sorted := make([]Data, 0, len(data))
-
- stats := make(map[string]os.FileInfo)
- for fpath := range data {
- if stat, err := os.Stat(fpath); err != nil {
- return nil, err
- } else {
- stats[fpath] = stat
- }
- }
-
- modtimes := make([]time.Time, 0, len(data))
- for _, stat := range stats {
- modtimes = append(modtimes, stat.ModTime())
- }
-
- if direction == "desc" {
- sort.Slice(modtimes, func(i, j int) bool {
- return modtimes[i].After(modtimes[j])
- })
- } else {
- sort.Slice(modtimes, func(i, j int) bool {
- return modtimes[i].Before(modtimes[j])
- })
- }
-
- for _, t := range modtimes {
- for fpath, stat := range stats {
- if stat.ModTime() == t {
- sorted = append(sorted, data[fpath])
- delete(stats, fpath)
- break
- }
- }
- }
-
- return sorted, nil
-}
-
-// GenerateSuperData simply addeds a key named `datakey` to `global` and assigns `data` to it
-func GenerateSuperData(datakey string, global Data, data []Data) (super Data, err error) {
- super = global
-
- if len(datakey) == 0 {
- datakey = "data"
- }
-
- if super[datakey] != nil {
- err = fmt.Errorf("datakey '%s' already exists", datakey)
- } else {
- super[datakey] = data
- }
-
- return
-}
-
-// MergeData combines all keys in `data` into a single Data object. If there's
-// a conflict (duplicate key), the first found value is kept and the conflicting
-// values are ignored.
-func MergeData(data ...Data) (merged Data, conflicts []string) {
- merged = make(Data)
- for _, d := range data {
- for key, val := range d {
- if merged[key] == nil {
- merged[key] = val
- } else {
- conflicts = append(conflicts, key)
- }
- }
- }
- return
-}
diff --git a/data_test.go b/data_test.go
@@ -24,7 +24,6 @@ import (
"os"
"strings"
"testing"
- "time"
)
var good = map[string]string{
@@ -51,15 +50,12 @@ func writeTestFile(t *testing.T, path string, Data string) {
return
}
-func validateData(t *testing.T, d Data, e error, lang string) {
+func validateData(t *testing.T, d interface{}, e error, lang string) {
var b []byte
if e != nil {
t.Error(e)
}
- if len(d) == 0 {
- t.Error("no data loaded")
- }
switch lang {
case "json":
@@ -79,148 +75,57 @@ func validateData(t *testing.T, d Data, e error, lang string) {
}
func TestLoadData(t *testing.T) {
- var d Data
+ var d interface{}
var e error
for lang, data := range good {
- d, e = LoadData(lang, strings.NewReader(data))
+ e = LoadData(lang, strings.NewReader(data), &d)
validateData(t, d, e, lang)
-
- if d, e = LoadData(lang, strings.NewReader(badData)); e == nil || len(d) > 0 {
- t.Errorf("bad %s passed", lang)
- }
-
- if d, e = LoadData(lang, strings.NewReader("")); e != nil {
- t.Errorf("empty file failed for json: %s, %s", d, e)
- }
}
- if d, e = LoadData("invalid", strings.NewReader("shouldn't pass")); e == nil || len(d) > 0 {
- t.Errorf("invalid data language passed: %s, %s", d, e)
+ if e = LoadData("json", strings.NewReader(badData), &d); e == nil {
+ t.Errorf("bad data passed")
}
-
- return
-}
-
-func validateFileData(t *testing.T, e error, d []Data, dlen int, orderedLangs ...string) {
- if e != nil {
- t.Error(e)
+ if e = LoadData("toml", strings.NewReader(""), &d); e != nil {
+ t.Errorf("empty data failed %s, %s", d, e)
}
-
- if dlen != len(orderedLangs) {
- t.Errorf("invalid orderedLangs length (%d should be %d)", len(orderedLangs), dlen)
- }
-
- if len(d) != dlen {
- t.Errorf("invalid data length (%d should be %d)", len(d), dlen)
+ if e = LoadData("void", strings.NewReader("shouldn't pass"), &d); e == nil {
+ t.Errorf("invalid data language passed: %s, %s", d, e)
}
- for i, lang := range orderedLangs {
- validateData(t, d[i], nil, lang)
- }
+ return
}
-func TestLoadDataFiles(t *testing.T) {
+func TestLoadDataFile(t *testing.T) {
+ var d interface{}
var e error
- var p []string
- var d []Data
+ var p string
tdir := t.TempDir()
- p = append(p, tdir+"/1.yaml")
- writeTestFile(t, p[len(p)-1], good["yaml"])
- time.Sleep(100 * time.Millisecond)
- p = append(p, tdir+"/good.json")
- writeTestFile(t, p[len(p)-1], good["json"])
- time.Sleep(100 * time.Millisecond)
- p = append(p, tdir+"/good.toml")
- writeTestFile(t, p[len(p)-1], good["toml"])
-
- d, e = LoadDataFiles("filename", tdir)
- validateFileData(t, e, d, len(p), "yaml", "json", "toml")
-
- d, e = LoadDataFiles("filename-desc", tdir+"/*")
- validateFileData(t, e, d, len(p), "toml", "json", "yaml")
-
- d, e = LoadDataFiles("modified", p...)
- validateFileData(t, e, d, len(p), "yaml", "json", "toml")
-
- d, e = LoadDataFiles("modified-desc", p...)
- validateFileData(t, e, d, len(p), "toml", "json", "yaml")
-
- p = append(p, tdir+"/bad.json")
- writeTestFile(t, p[len(p)-1], badData)
- if _, e = LoadDataFiles("modified-desc", p...); e == nil {
- t.Error("bad.json passed")
+ for lang, data := range good {
+ p = tdir + "/good." + lang
+ writeTestFile(t, p, data)
+ e = LoadDataFile(p, &d)
+ validateData(t, d, e, lang)
}
-}
-func TestGenerateSuperData(t *testing.T) {
- var data Data
- var e error
- var gd Data
- var d []Data
- var sd Data
-
- if data, e = LoadData("json", strings.NewReader(good["json"])); e == nil {
- gd = data
- } else {
- t.Skip("setup failure:", e)
- }
- if data, e = LoadData("json", strings.NewReader(good["json"])); e == nil {
- d = append(d, data)
- } else {
- t.Skip("setup failure:", e)
- }
- if data, e = LoadData("yaml", strings.NewReader(good["yaml"])); e == nil {
- d = append(d, data)
- } else {
- t.Skip("setup failure:", e)
+ p = tdir + "/bad.json"
+ writeTestFile(t, p, badData)
+ e = LoadDataFile(p, &d)
+ if e == nil {
+ t.Errorf("bad data passed")
}
- sd, e = GenerateSuperData("testdata", gd, d)
+ p = tdir + "/empty.json"
+ writeTestFile(t, p, "")
+ e = LoadDataFile(p, &d)
if e != nil {
- t.Error(e)
+ t.Errorf("empty file failed: %s", e)
}
- if sd["testdata"] == nil {
- t.Log(sd)
- t.Error("datakey is empty")
- }
- if v, ok := sd["testdata"].([]Data); ok == false {
- t.Log(sd)
- t.Error("unable to infer datakey 'testdata'")
- } else if len(v) < len(data) {
- t.Log(sd)
- t.Error("datakey is missing data")
- }
-}
-func TestMergeData(t *testing.T) {
- var e error
- var d []Data
- var m Data
- var c []string
-
- if m, e = LoadData("json", strings.NewReader(good["json"])); e == nil {
- d = append(d, m)
- } else {
- t.Skip("setup failure:", e)
- }
- if m, e = LoadData("json", strings.NewReader(good["json"])); e == nil {
- d = append(d, m)
- } else {
- t.Skip("setup failure:", e)
- }
- if m, e = LoadData("yaml", strings.NewReader(good["yaml"])); e == nil {
- d = append(d, m)
- } else {
- t.Skip("setup failure:", e)
+ if e = LoadDataFile("non-existing-file.toml", &d); e == nil {
+ t.Errorf("non-existing file passed: %s, %s", d, e)
}
- m, c = MergeData(d...)
- if m["eg"] == nil {
- t.Error("missing global keys")
- }
- if len(c) == 0 {
- t.Errorf("conflicting keys were not reported")
- }
+ return
}