mirror of https://github.com/schollz/cowyo.git
synced 2023-08-10 21:13:00 +03:00
Gofmt
This commit is contained in:
parent dfd9aea863
commit 131a54a682
migrate.go (66 changed lines)
@@ -1,33 +1,33 @@
package main

import (
	"fmt"
	"io/ioutil"
	"path"
)

func migrate(pathToOldData, pathToData string) error {
	files, err := ioutil.ReadDir(pathToOldData)
	if len(files) == 0 {
		return err
	}
	for _, f := range files {
		if f.Mode().IsDir() {
			continue
		}
		fmt.Printf("Migrating %s", f.Name())
		p := Open(f.Name())
		bData, err := ioutil.ReadFile(path.Join(pathToOldData, f.Name()))
		if err != nil {
			return err
		}
		err = p.Update(string(bData))
		if err != nil {
			return err
		}
		if err = p.Save(); err != nil {
			return err
		}
	}
	return nil
}
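
As a usage note, here is a minimal sketch of a call site for migrate, assuming the repository's package main where migrate and the global pathToData are defined; the old-data path and the runMigration wrapper are hypothetical and exist only for illustration.

package main

import "fmt"

// runMigration is a hypothetical wrapper: it copies every flat file found in
// an old data directory into the wiki's JSON page store by way of
// Open/Update/Save, which is exactly what migrate above does.
func runMigration() {
	oldData := "/path/to/old_data" // hypothetical location of the pre-migration files
	if err := migrate(oldData, pathToData); err != nil {
		fmt.Println("migration failed:", err)
	}
}
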
page.go (326 changed lines)
@@ -1,163 +1,163 @@
package main

import (
	"encoding/json"
	"io/ioutil"
	"os"
	"path"
	"path/filepath"
	"regexp"
	"sort"
	"strings"
	"sync"
	"time"

	"github.com/schollz/versionedtext"
)

// Page is the basic struct
type Page struct {
	Name                    string
	Text                    versionedtext.VersionedText
	Meta                    string
	RenderedPage            string
	IsLocked                bool
	PassphraseToUnlock      string
	IsEncrypted             bool
	IsPrimedForSelfDestruct bool
	IsPublished             bool
}

func (p Page) LastEditTime() time.Time {
	return time.Unix(p.LastEditUnixTime(), 0)
}

func (p Page) LastEditUnixTime() int64 {
	return p.Text.LastEditTime() / 1000000000
}

func Open(name string) (p *Page) {
	p = new(Page)
	p.Name = name
	p.Text = versionedtext.NewVersionedText("")
	p.Render()
	bJSON, err := ioutil.ReadFile(path.Join(pathToData, encodeToBase32(strings.ToLower(name))+".json"))
	if err != nil {
		return
	}
	err = json.Unmarshal(bJSON, &p)
	if err != nil {
		p = new(Page)
	}
	return p
}

type DirectoryEntry struct {
	Name       string
	Length     int
	Numchanges int
	LastEdited time.Time
}

func (d DirectoryEntry) LastEditTime() string {
	return d.LastEdited.Format("Mon Jan 2 15:04:05 MST 2006")
}

func DirectoryList() []DirectoryEntry {
	files, _ := ioutil.ReadDir(pathToData)
	entries := make([]DirectoryEntry, len(files))
	for i, f := range files {
		name := DecodeFileName(f.Name())
		p := Open(name)
		entries[i] = DirectoryEntry{
			Name:       name,
			Length:     len(p.Text.GetCurrent()),
			Numchanges: p.Text.NumEdits(),
			LastEdited: time.Unix(p.Text.LastEditTime()/1000000000, 0),
		}
	}
	sort.Slice(entries, func(i, j int) bool { return entries[i].LastEdited.After(entries[j].LastEdited) })
	return entries
}

func DecodeFileName(s string) string {
	s2, _ := decodeFromBase32(strings.Split(s, ".")[0])
	return s2
}

// Update cleans the text and updates the versioned text
// and generates a new render
func (p *Page) Update(newText string) error {
	// Trim space from end
	newText = strings.TrimRight(newText, "\n\t ")

	// Update the versioned text
	p.Text.Update(newText)

	// Render the new page
	p.Render()

	return p.Save()
}

var rBracketPage = regexp.MustCompile(`\[\[(.*?)\]\]`)

func (p *Page) Render() {
	if p.IsEncrypted {
		p.RenderedPage = "<code>" + p.Text.GetCurrent() + "</code>"
		return
	}

	// Convert [[page]] to [page](/page/view)
	currentText := p.Text.GetCurrent()
	for _, s := range rBracketPage.FindAllString(currentText, -1) {
		currentText = strings.Replace(currentText, s, "["+s[2:len(s)-2]+"](/"+s[2:len(s)-2]+"/view)", 1)
	}
	p.Text.Update(currentText)
	p.RenderedPage = MarkdownToHtml(p.Text.GetCurrent())
}

var saveMut = sync.Mutex{}

func (p *Page) Save() error {
	saveMut.Lock()
	defer saveMut.Unlock()
	bJSON, err := json.MarshalIndent(p, "", " ")
	if err != nil {
		return err
	}
	return ioutil.WriteFile(path.Join(pathToData, encodeToBase32(strings.ToLower(p.Name))+".json"), bJSON, 0644)
}

func (p *Page) ChildPageNames() []string {
	prefix := strings.ToLower(p.Name + ": ")
	files, err := filepath.Glob(path.Join(pathToData, "*"))
	if err != nil {
		panic("Filepath pattern cannot be malformed")
	}

	result := []string{}
	for i := range files {
		basename := filepath.Base(files[i])
		if strings.HasSuffix(basename, ".json") {
			cname, err := decodeFromBase32(basename[:len(basename)-len(".json")])
			if err == nil && strings.HasPrefix(strings.ToLower(cname), prefix) {
				result = append(result, cname)
			}
		}
	}
	return result
}

func (p *Page) IsNew() bool {
	return !exists(path.Join(pathToData, encodeToBase32(strings.ToLower(p.Name))+".json"))
}

func (p *Page) Erase() error {
	log.Trace("Erasing " + p.Name)
	return os.Remove(path.Join(pathToData, encodeToBase32(strings.ToLower(p.Name))+".json"))
}

func (p *Page) Published() bool {
	return p.IsPublished
}
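
For clarity, a standalone sketch of the [[page]] link rewrite that Render performs: the regexp and the replacement logic mirror page.go above, while the variable name rBracketLink, the sample text, and the main wrapper are only for illustration.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// rBracketLink matches the wiki-style [[page]] syntax, as rBracketPage does in page.go.
var rBracketLink = regexp.MustCompile(`\[\[(.*?)\]\]`)

func main() {
	text := "See [[ideas]] and [[todo list]] for more."
	for _, s := range rBracketLink.FindAllString(text, -1) {
		name := s[2 : len(s)-2]
		// Rewrite each [[name]] into a Markdown link to /name/view,
		// replacing only the first occurrence, as Render does.
		text = strings.Replace(text, s, "["+name+"](/"+name+"/view)", 1)
	}
	fmt.Println(text)
	// Prints: See [ideas](/ideas/view) and [todo list](/todo list/view) for more.
}
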
page_test.go (144 changed lines)
@@ -1,72 +1,72 @@
package main

import (
	"os"
	"strings"
	"testing"
)

func TestListFiles(t *testing.T) {
	pathToData = "testdata"
	os.MkdirAll(pathToData, 0755)
	defer os.RemoveAll(pathToData)
	p := Open("testpage")
	p.Update("Some data")
	p = Open("testpage2")
	p.Update("A different bunch of data")
	p = Open("testpage3")
	p.Update("Not much else")
	n := DirectoryList()
	if len(n) != 3 {
		t.Error("Expected three directory entries")
		t.FailNow()
	}
	if n[0].Name != "testpage" {
		t.Error("Expected testpage to be first")
	}
	if n[1].Name != "testpage2" {
		t.Error("Expected testpage2 to be second")
	}
	if n[2].Name != "testpage3" {
		t.Error("Expected testpage3 to be last")
	}
}

func TestGeneral(t *testing.T) {
	pathToData = "testdata"
	os.MkdirAll(pathToData, 0755)
	defer os.RemoveAll(pathToData)
	p := Open("testpage")
	err := p.Update("**bold**")
	if err != nil {
		t.Error(err)
	}
	if strings.TrimSpace(p.RenderedPage) != "<p><strong>bold</strong></p>" {
		t.Errorf("Did not render: '%s'", p.RenderedPage)
	}
	err = p.Update("**bold** and *italic*")
	if err != nil {
		t.Error(err)
	}
	p.Save()

	p2 := Open("testpage")
	if strings.TrimSpace(p2.RenderedPage) != "<p><strong>bold</strong> and <em>italic</em></p>" {
		t.Errorf("Did not render: '%s'", p2.RenderedPage)
	}

	p3 := Open("testpage: childpage")
	err = p3.Update("**child content**")
	if err != nil {
		t.Error(err)
	}

	children := p.ChildPageNames()
	if len(children) != 1 {
		t.Errorf("Expected 1 child page to be found, got %d", len(children))
		return
	}
	if children[0] != "testpage: childpage" {
		t.Errorf("Expected child page %s to be found (got %s)", "testpage: childpage", children[0])
	}
}
@@ -1,35 +1,35 @@
package main

import (
	"testing"
)

func BenchmarkAlliterativeAnimal(b *testing.B) {
	for i := 0; i < b.N; i++ {
		randomAlliterateCombo()
	}
}

func TestReverseList(t *testing.T) {
	s := []int64{1, 10, 2, 20}
	if reverseSliceInt64(s)[0] != 20 {
		t.Errorf("Could not reverse: %v", s)
	}
	s2 := []string{"a", "b", "d", "c"}
	if reverseSliceString(s2)[0] != "c" {
		t.Errorf("Could not reverse: %v", s2)
	}
}

func TestHashing(t *testing.T) {
	p := HashPassword("1234")
	log.Debug(p)
	err := CheckPasswordHash("1234", p)
	if err != nil {
		t.Errorf("Should be correct password")
	}
	err = CheckPasswordHash("1234lkjklj", p)
	if err == nil {
		t.Errorf("Should NOT be correct password")
	}
}