Implement BookStack conversion support
parent bfc1ad40a5
commit 55ad1fbfee
BUILD.md
@ -1,13 +1,3 @@
## Create the project

npm init vite@latest ui -- --template react-ts
cd ui
npm install

cd ..
go get


## Run

set GOTMPDIR=C:\MYCOMP
README.md
@ -1,3 +1,32 @@
# ncDocConverter

A Go program able to convert Office Documents automatically to PDF / EPUB Files via OnlyOffice
A Go program that automatically converts documents to PDF / EPUB files.

Currently, the following sources for documents are supported:

* Nextcloud with OnlyOffice
* BookStack

As a destination for the converted files, only **Nextcloud** is supported.


## Setting it up

For using the


### BookStack

To convert books from BookStack you need to create an API token for the user that accesses the books:
1. Log in as Admin
2. Go to *Settings → Users*
3. Select the user for API access
4. Scroll down to `API Tokens` and click `CREATE TOKEN`
5. Set a name and expiry date. Click `save`
6. Copy the ID and token. The field `apiToken` is the combination `id:token` (see the sketch below)
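The token is then sent as an `Authorization` header with every API request. A minimal Go sketch of such a request (the instance URL is the one from the example configuration and `apiToken` is a placeholder; the converter builds the same header internally from the configured value):

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Placeholder for the "id:token" combination created in step 6
	apiToken := "tokenId:tokenSecret"

	req, err := http.NewRequest(http.MethodGet, "https://wiki.rpjosh.de/api/books", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	// BookStack expects the API token in the Authorization header
	req.Header.Set("Authorization", "Token "+apiToken)

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()

	body, _ := io.ReadAll(res.Body)
	fmt.Println(res.StatusCode, string(body))
}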

Now you also need to create a new role or edit an existing one:
1. Go to *Settings → Roles*
2. Edit an existing role (the role the user has) or create a new one
3. Check the boxes `Access system API` and `Export content` under `System permissions`
4. Assign the *View* permission *(all and own)* for *Shelves, Books, Chapters and Pages*
@ -55,7 +55,7 @@ func main() {
	if err != nil {
		logger.Error("Unable to parse the file %s: %s", "dd", err)
	}
	ncworker.NewScheduler(ncConvertUsers)
	ncworker.NewScheduler(ncConvertUsers, config)

	if 1 == 1 {
		return
|
@ -6,17 +6,14 @@ import (
|
|||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/chi/v5/middleware"
|
||||
"rpjosh.de/ncDocConverter/internal/api"
|
||||
"rpjosh.de/ncDocConverter/internal/frontend"
|
||||
)
|
||||
|
||||
func (app *WebApplication) routes() http.Handler {
|
||||
frontend := frontend.Frontend{Logger: app.logger, Config: app.config}
|
||||
api := api.Api{Logger: app.logger, Config: app.config}
|
||||
|
||||
router := chi.NewRouter()
|
||||
router.Use(middleware.RealIP, app.recoverPanic, app.logRequest, secureHeaders)
|
||||
|
||||
frontend.SetupServer(router)
|
||||
api.SetupServer(router)
|
||||
|
||||
return router
|
||||
|
|
|
@ -1,13 +1,13 @@
server:
  # Address to listen on
  address: ":4000"

  # Path to the folder with the certificates file (cert.pem and key.pem) for using TLS
  certificate: "/etc/letsencrypt/live/"

  # Enables the development server with hot reload function (spawns a vite server)
  developmentServer: false
  # Port on which the development server should listen
  developmentServerPort: 5173
  # If this parameter is given, all jobs are executed immediately after starting the program.
  # Afterwards the program exits -> the "execution" field of the jobs is ignored
  oneShot: false

logging:
  # Minimum log level for printing to the console (debug, info, warning, error, fatal)
@ -0,0 +1,78 @@
{
    "nextcloudUsers": [
        {
            // Nextcloud user and instance to save the converted files
            "nextcloudUrl": "https://cloud.rpjosh.de",
            "username": "exchange",
            "password": "Zj9cQ-eF3n6-R6DSt-8sJXf-kYseJ",

            // OnlyOffice (docx, xlsx, ...) conversion to pdf
            "jobs": [
                {
                    "jobName": "Convert my books",
                    "sourceDir": "api/",
                    "destinationDir": "ebooks/",

                    // Keep the folders of the source
                    // Otherwise all files will be saved in the destination dir
                    "keepFolders": true,

                    // If the folder should be searched recursively
                    "recursive": true,

                    // Execution date in the cron format
                    "execution": "45 23 * * 6"
                }
            ],

            // Conversion from BookStack to pdf/html
            "bookStack": {
                "url": "https://wiki.rpjosh.de",
                "username": "test@rpjosh.de",
                "apiToken": "typCf2LoSQDHicpeeAZQCDZwAq7BvVdl:PcKMZVDrIwEJeKIKyaD7w0cf20JCjpZz",

                "jobs": [
                    {
                        "jobName": "Convert my favorite books",

                        // Shelves to filter -> convert only shelves with the names "Work" and "Linux"
                        // Leave empty to convert books in all shelves
                        "shelves": [ "Work", "Linux" ],
                        // Regex to filter by the shelf name
                        "shelveRegex": "",

                        // Books to filter (see shelves for more information)
                        "books": [],
                        "booksRegex": "",

                        // If books that don't belong to a shelf should also be converted.
                        // They will be placed in the root folder.
                        // Note that the fields "shelves" and "shelveRegex" don't work as expected then
                        // (books inside these shelves won't be excluded but will be placed in the root)
                        "includeBooksWithoutShelve": false,

                        // Destination folder to save the converted documents in Nextcloud
                        "destinationDir": "ebooks/wiki/",

                        // Export format (html or pdf)
                        "format": "html",

                        // If the books should be saved inside the shelves folder
                        // Otherwise all files will be saved in the destination dir
                        "keepStructure": true,

                        // Execution date in the cron format
                        "execution": "45 23 * * 6",

                        // Fetching books and shelves can be resource-hungry. This value specifies the number
                        // of jobs that are executed with cached data.
                        // Note that new or deleted books and shelves won't be converted until the cache counter
                        // expires. Changes in existing books will still be detected.
                        // Specify zero to disable the cache
                        "cache": 3
                    }
                ]
            }
        }
    ]
}
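The "execution" fields above use the standard five-field cron syntax. Internally the scheduler added in this commit hands these expressions to gocron; a minimal, self-contained sketch of that mechanism (the job body is a stand-in, not the actual converter call):

package main

import (
	"fmt"
	"time"

	"github.com/go-co-op/gocron"
)

func main() {
	s := gocron.NewScheduler(time.Local)

	// "45 23 * * 6" = every Saturday at 23:45, as in the example jobs above
	_, err := s.Cron("45 23 * * 6").Do(func() {
		fmt.Println("running conversion job")
	})
	if err != nil {
		fmt.Println("failed to schedule job:", err)
		return
	}

	s.StartBlocking()
}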
go.mod
@ -3,10 +3,12 @@ module rpjosh.de/ncDocConverter
go 1.18

require (
	github.com/go-chi/chi/v5 v5.0.7 // indirect
	github.com/go-chi/chi/v5 v5.0.8 // indirect
	github.com/go-co-op/gocron v1.18.0 // indirect
	github.com/go-yaml/yaml v2.1.0+incompatible // indirect
	github.com/justinas/nosurf v1.1.1 // indirect
	github.com/studio-b12/gowebdav v0.0.0-20220128162035-c7b1ff8a5e62 // indirect
	github.com/robfig/cron/v3 v3.0.1 // indirect
	golang.org/x/sync v0.1.0 // indirect
	gopkg.in/yaml.v3 v3.0.1 // indirect
)
go.sum
|
@ -1,11 +1,21 @@
|
|||
github.com/go-chi/chi/v5 v5.0.7 h1:rDTPXLDHGATaeHvVlLcR4Qe0zftYethFucbjVQ1PxU8=
|
||||
github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
|
||||
github.com/go-chi/chi/v5 v5.0.8 h1:lD+NLqFcAi1ovnVZpsnObHGW4xb4J8lNmoYVfECH1Y0=
|
||||
github.com/go-chi/chi/v5 v5.0.8/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
|
||||
github.com/go-co-op/gocron v1.18.0 h1:SxTyJ5xnSN4byCq7b10LmmszFdxQlSQJod8s3gbnXxA=
|
||||
github.com/go-co-op/gocron v1.18.0/go.mod h1:sD/a0Aadtw5CpflUJ/lpP9Vfdk979Wl1Sg33HPHg0FY=
|
||||
github.com/go-yaml/yaml v2.1.0+incompatible h1:RYi2hDdss1u4YE7GwixGzWwVo47T8UQwnTLB6vQiq+o=
|
||||
github.com/go-yaml/yaml v2.1.0+incompatible/go.mod h1:w2MrLa16VYP0jy6N7M5kHaCkaLENm+P+Tv+MfurjSw0=
|
||||
github.com/justinas/nosurf v1.1.1 h1:92Aw44hjSK4MxJeMSyDa7jwuI9GR2J/JCQiaKvXXSlk=
|
||||
github.com/justinas/nosurf v1.1.1/go.mod h1:ALpWdSbuNGy2lZWtyXdjkYv4edL23oSEgfBT1gPJ5BQ=
|
||||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
||||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||
github.com/studio-b12/gowebdav v0.0.0-20220128162035-c7b1ff8a5e62 h1:b2nJXyPCa9HY7giGM+kYcnQ71m14JnGdQabMPmyt++8=
|
||||
github.com/studio-b12/gowebdav v0.0.0-20220128162035-c7b1ff8a5e62/go.mod h1:bHA7t77X/QFExdeAnDzK6vKM34kEZAcE1OX4MfiwjkE=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
|
|
@ -1,28 +0,0 @@
|
|||
package frontend
|
||||
|
||||
import (
|
||||
"text/template"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"rpjosh.de/ncDocConverter/internal/models"
|
||||
"rpjosh.de/ncDocConverter/pkg/logger"
|
||||
)
|
||||
|
||||
// Contains the shared dependencies needed for the WebApplication
|
||||
type Frontend struct {
|
||||
Logger *logger.Logger
|
||||
Config *models.WebConfig
|
||||
templateCache map[string]*template.Template
|
||||
}
|
||||
|
||||
func (app *Frontend) SetupServer(router *chi.Mux) {
|
||||
templateCache, err := newTemplateCache()
|
||||
if err != nil {
|
||||
logger.Fatal("Failed to parse the templates", err)
|
||||
}
|
||||
app.templateCache = templateCache
|
||||
|
||||
app.setServerConfiguration()
|
||||
|
||||
app.routes(router)
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
package frontend
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
func (app *Frontend) home(w http.ResponseWriter, r *http.Request) {
|
||||
app.render(w, http.StatusOK, "main.tmpl.html", app.newTemplateData(r))
|
||||
}
|
||||
|
||||
func (app *Frontend) render(w http.ResponseWriter, status int, page string, data *templateData) {
|
||||
ts, ok := app.templateCache[page]
|
||||
if !ok {
|
||||
err := fmt.Errorf("the template %s does not exist", page)
|
||||
app.serverError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
|
||||
err := ts.ExecuteTemplate(buf, "base", data)
|
||||
if err != nil {
|
||||
app.serverError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(status)
|
||||
buf.WriteTo(w)
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
package frontend
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"runtime/debug"
|
||||
|
||||
"rpjosh.de/ncDocConverter/pkg/logger"
|
||||
)
|
||||
|
||||
// The serverError helper writes an error message and stack trace to the errorLog,
|
||||
// then sends a generic 500 Internal Server Error response to the user.
|
||||
func (app *Frontend) serverError(w http.ResponseWriter, err error) {
|
||||
trace := fmt.Sprintf("%s\n%s", err.Error(), debug.Stack())
|
||||
logger.Error(trace)
|
||||
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
}
|
||||
|
||||
// The clientError helper sends a specific status code and corresponding description
|
||||
// to the user. We'll use this later in the book to send responses like 400 "Bad
|
||||
// Request" when there's a problem with the request that the user sent.
|
||||
func (app *Frontend) clientError(w http.ResponseWriter, status int) {
|
||||
http.Error(w, http.StatusText(status), status)
|
||||
}
|
||||
|
||||
// For consistency, we'll also implement a notFound helper. This is simply a
|
||||
// convenience wrapper around clientError which sends a 404 Not Found response to
|
||||
// the user.
|
||||
func (app *Frontend) notFound(w http.ResponseWriter) {
|
||||
app.clientError(w, http.StatusNotFound)
|
||||
}
|
|
@ -1,83 +0,0 @@
|
|||
package frontend
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"rpjosh.de/ncDocConverter/pkg/logger"
|
||||
"rpjosh.de/ncDocConverter/web"
|
||||
)
|
||||
|
||||
func (app *Frontend) routes(router *chi.Mux) {
|
||||
|
||||
if app.Config.Server.DevelopmentServer {
|
||||
app.renderForDev(router)
|
||||
} else {
|
||||
app.renderForProd(router)
|
||||
}
|
||||
|
||||
router.Route("/", func(mainRouter chi.Router) {
|
||||
mainRouter.Get("/", app.home)
|
||||
})
|
||||
|
||||
router.NotFound(func(w http.ResponseWriter, r *http.Request) {
|
||||
app.notFound(w)
|
||||
})
|
||||
}
|
||||
|
||||
// Runs the vite server as a sub process for serving the files with the hot reload function.
// The src directory will also be exposed from within this server for the assets
|
||||
func (app *Frontend) renderForDev(router *chi.Mux) {
|
||||
// serve assets from src folder
|
||||
FileServer(router, "/src", http.Dir("./web/app/src"))
|
||||
|
||||
logger.Info("[DEV] Started vite dev server on http://localhost:%d", app.Config.Server.DevelopmentServerPort)
|
||||
vite := exec.Command(filepath.Join(".", "node_modules", ".bin", "vite"), "--mode", "development", "--port", fmt.Sprint(app.Config.Server.DevelopmentServerPort))
|
||||
vite.Dir = "./web/app/"
|
||||
vite.Stdout = os.Stdout
|
||||
vite.Stderr = os.Stderr
|
||||
err := vite.Start()
|
||||
if err != nil {
|
||||
logger.Error("Failed to start the vite development server: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
// This serves all the needed files from the embedded file system within the binary
|
||||
// -> no additional WebServer
|
||||
func (app *Frontend) renderForProd(router *chi.Mux) {
|
||||
staticFolder, err := fs.Sub(web.FrontendFiles, "app/dist/assets")
|
||||
if err != nil {
|
||||
logger.Fatal("Cannot access the embedded directory 'src'. %s", err)
|
||||
}
|
||||
FileServer(router, "/assets", http.FS(staticFolder))
|
||||
}
|
||||
|
||||
func FileServer(r chi.Router, path string, root http.FileSystem) {
|
||||
if strings.ContainsAny(path, "{}*") {
|
||||
logger.Info("FileServer does not permit any URL parameters.")
|
||||
}
|
||||
|
||||
if path != "/" && path[len(path)-1] != '/' {
|
||||
r.Get(path, http.RedirectHandler(path+"/", http.StatusMovedPermanently).ServeHTTP)
|
||||
path += "/"
|
||||
}
|
||||
path += "*"
|
||||
|
||||
r.Get(path, func(w http.ResponseWriter, r *http.Request) {
|
||||
if strings.HasSuffix(r.URL.Path, "/") {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
rctx := chi.RouteContext(r.Context())
|
||||
pathPrefix := strings.TrimSuffix(rctx.RoutePattern(), "/*")
|
||||
fs := http.StripPrefix(pathPrefix, http.FileServer(root))
|
||||
fs.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
|
@ -1,89 +0,0 @@
|
|||
package frontend
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
"text/template"
|
||||
|
||||
"rpjosh.de/ncDocConverter/web"
|
||||
)
|
||||
|
||||
type serverConfig struct {
|
||||
Version string
|
||||
Development bool
|
||||
SourceServer string
|
||||
}
|
||||
|
||||
// the server config never changes again -> set this once at startup
|
||||
var serverConf *serverConfig = &serverConfig{
|
||||
Version: "1.0.0",
|
||||
}
|
||||
|
||||
type templateData struct {
|
||||
Version string
|
||||
ServerConfig *serverConfig
|
||||
}
|
||||
|
||||
// Returns the absolute URL on the WebServer to the given TypeScript file given without the file extension
|
||||
// main -> http://localhost:4000/assets/main.js
|
||||
func getJSFile(file string) string {
|
||||
if serverConf.Development {
|
||||
return serverConf.SourceServer + "src/" + file + ".tsx"
|
||||
}
|
||||
|
||||
return serverConf.SourceServer + "assets/" + file + ".js"
|
||||
}
|
||||
|
||||
var functions = template.FuncMap{
|
||||
"getJSFile": getJSFile,
|
||||
}
|
||||
|
||||
func (app *Frontend) setServerConfiguration() {
|
||||
serverConf.Development = app.Config.Server.DevelopmentServer
|
||||
|
||||
sourceServer := ""
|
||||
if serverConf.Development {
|
||||
sourceServer = fmt.Sprintf("http://localhost:%d/", app.Config.Server.DevelopmentServerPort)
|
||||
} else {
|
||||
sourceServer = fmt.Sprintf("http://localhost%s/", app.Config.Server.Address)
|
||||
}
|
||||
serverConf.SourceServer = sourceServer
|
||||
}
|
||||
|
||||
func (app *Frontend) newTemplateData(r *http.Request) *templateData {
|
||||
return &templateData{
|
||||
ServerConfig: serverConf,
|
||||
}
|
||||
}
|
||||
|
||||
// Initializes a new cache containing all templates of the application
|
||||
// from the embedded file system
|
||||
func newTemplateCache() (map[string]*template.Template, error) {
|
||||
cache := map[string]*template.Template{}
|
||||
|
||||
pages, err := fs.Glob(&web.TemplateFiles, "template/pages/*.tmpl.html")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, page := range pages {
|
||||
name := filepath.Base(page)
|
||||
|
||||
patterns := []string{
|
||||
"template/base.tmpl.html",
|
||||
"template/vitejs.tmpl.html",
|
||||
page,
|
||||
}
|
||||
|
||||
ts, err := template.New(name).Funcs(functions).ParseFS(web.TemplateFiles, patterns...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cache[name] = ts
|
||||
}
|
||||
|
||||
return cache, nil
|
||||
}
|
|
@ -0,0 +1,38 @@
|
|||
package models
|
||||
|
||||
// BookStack details to fetch books from
|
||||
type BookStack struct {
|
||||
URL string `json:"url"`
|
||||
Username string `json:"username"`
|
||||
Token string `json:"apiToken"`
|
||||
|
||||
Jobs []BookStackJob `json:"jobs"`
|
||||
}
|
||||
|
||||
// A concrete BookStack job
|
||||
type BookStackJob struct {
|
||||
JobName string `json:"jobName"`
|
||||
DestinationDir string `json:"destinationDir"`
|
||||
|
||||
Shelves []string `json:"shelves"`
|
||||
ShelvesRegex string `json:"shelveRegex"`
|
||||
|
||||
Books []string `json:"books"`
|
||||
BooksRegex string `json:"booksRegex"`
|
||||
|
||||
IncludeBooksWithoutShelve bool `json:"includeBooksWithoutShelve"`
|
||||
Format Format `json:"format"`
|
||||
KeepStructure bool `json:"keepStructure"`
|
||||
|
||||
Recursive string `json:"recursive"`
|
||||
Execution string `json:"execution"`
|
||||
|
||||
CacheCount int `json:"cache"`
|
||||
}
|
||||
|
||||
type Format string
|
||||
|
||||
const (
|
||||
HTML Format = "html"
|
||||
PDF Format = "pdf"
|
||||
)
|
|
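The struct above maps directly onto the `bookStack` job objects from the example configuration. A standalone sketch of decoding such a job with the standard library (trimmed copy of the struct, illustrative values taken from the example config):

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of the BookStackJob model shown above, enough for the sketch
type BookStackJob struct {
	JobName        string   `json:"jobName"`
	DestinationDir string   `json:"destinationDir"`
	Shelves        []string `json:"shelves"`
	Format         string   `json:"format"`
	KeepStructure  bool     `json:"keepStructure"`
	Execution      string   `json:"execution"`
	CacheCount     int      `json:"cache"`
}

func main() {
	raw := `{
		"jobName": "Convert my favorite books",
		"shelves": ["Work", "Linux"],
		"destinationDir": "ebooks/wiki/",
		"format": "html",
		"keepStructure": true,
		"execution": "45 23 * * 6",
		"cache": 3
	}`

	var job BookStackJob
	if err := json.Unmarshal([]byte(raw), &job); err != nil {
		fmt.Println("failed to parse job:", err)
		return
	}
	fmt.Printf("%+v\n", job)
}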
@ -7,25 +7,32 @@ import (
|
|||
"os"
|
||||
)
|
||||
|
||||
type User struct {
|
||||
AuthUser string`json:"authUser"`
|
||||
NextcloudBaseUrl string`json:"nextcloudUrl"`
|
||||
Username string`json:"username"`
|
||||
Password string`json:"password"`
|
||||
ConvertJobs []ConvertJob`json:"jobs"`
|
||||
// The root Nextcloud user where the converted files are stored
// and where the files for OnlyOffice jobs are defined
|
||||
type NextcloudUser struct {
|
||||
NextcloudBaseUrl string `json:"nextcloudUrl"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
|
||||
// OnlyOffice
|
||||
ConvertJobs []NcConvertJob `json:"jobs"`
|
||||
|
||||
// BookStack
|
||||
BookStack BookStack `json:"bookStack"`
|
||||
}
|
||||
|
||||
type ConvertJob struct {
|
||||
JobName string`json:"jobName"`
|
||||
SourceDir string`json:"sourceDir"`
|
||||
DestinationDir string`json:"destinationDir"`
|
||||
KeepFolders string`json:"keepFolders"`
|
||||
Recursive string`json:"recursive"`
|
||||
Executions []string`json:"execution"`
|
||||
// An OnlyOffice docs convert job
|
||||
type NcConvertJob struct {
|
||||
JobName string `json:"jobName"`
|
||||
SourceDir string `json:"sourceDir"`
|
||||
DestinationDir string `json:"destinationDir"`
|
||||
KeepFolders string `json:"keepFolders"`
|
||||
Recursive string `json:"recursive"`
|
||||
Execution string `json:"execution"`
|
||||
}
|
||||
|
||||
type NcConvertUsers struct {
|
||||
Users []User`json:"users"`
|
||||
Users []NextcloudUser `json:"nextcloudUsers"`
|
||||
}
|
||||
|
||||
// Parses the given file into the in-memory struct
|
||||
|
|
|
@ -14,10 +14,9 @@ type WebConfig struct {
|
|||
}
|
||||
|
||||
type Server struct {
|
||||
Address string `yaml:"address"`
|
||||
Certificate string `yaml:"certificate"`
|
||||
DevelopmentServer bool `yaml:"developmentServer"`
|
||||
DevelopmentServerPort int `yaml:"developmentServerPort"`
|
||||
Address string `yaml:"address"`
|
||||
Certificate string `yaml:"certificate"`
|
||||
OneShot bool `yaml:"oneShot"`
|
||||
}
|
||||
|
||||
type Logging struct {
|
||||
|
@ -26,7 +25,7 @@ type Logging struct {
|
|||
LogFilePath string `yaml:"logFilePath"`
|
||||
}
|
||||
|
||||
// Parses the given configuration file (.yaml file) to an WebConfiguration
|
||||
// Parses the configuration file (.yaml file) to a WebConfiguration
|
||||
func ParseWebConfig(webConfig *WebConfig, file string) (*WebConfig, error) {
|
||||
if file == "" {
|
||||
return webConfig, nil
|
||||
|
@ -47,8 +46,7 @@ func ParseWebConfig(webConfig *WebConfig, file string) (*WebConfig, error) {
|
|||
func getDefaultConfig() *WebConfig {
|
||||
return &WebConfig{
|
||||
Server: Server{
|
||||
Address: ":4000",
|
||||
DevelopmentServerPort: 5173,
|
||||
Address: ":4000",
|
||||
},
|
||||
Logging: Logging{
|
||||
PrintLogLevel: "info",
|
||||
|
@ -78,12 +76,12 @@ func SetConfig() (*WebConfig, error) {
|
|||
_ = flag.String("config", "./config.yaml", "Path to the configuration file (see configs/config.yaml) for an example")
|
||||
address := flag.String("address", webConfig.Server.Address, "Address and port on which the api and the web server should listen to")
|
||||
printLogLevel := flag.String("printLogLevel", webConfig.Logging.PrintLogLevel, "Minimum log level to log (debug, info, warning, error, fatal)")
|
||||
devServer := flag.Bool("dev", webConfig.Server.DevelopmentServer, "Enables the development server with hot reload support")
|
||||
oneShot := flag.Bool("oneShot", webConfig.Server.OneShot, "All jobs are executed immediately and the program exits afterwards")
|
||||
|
||||
flag.Parse()
|
||||
webConfig.Server.Address = *address
|
||||
webConfig.Logging.PrintLogLevel = *printLogLevel
|
||||
webConfig.Server.DevelopmentServer = *devServer
|
||||
webConfig.Server.OneShot = *oneShot
|
||||
|
||||
defaultLogger := logger.Logger{
|
||||
PrintLevel: logger.GetLevelByName(webConfig.Logging.PrintLogLevel),
|
||||
|
|
|
@ -0,0 +1,589 @@
|
|||
package ncworker
|
||||
|
||||
// @TODO delete folders for shelves that don't exist anymore
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"rpjosh.de/ncDocConverter/internal/models"
|
||||
"rpjosh.de/ncDocConverter/internal/nextcloud"
|
||||
"rpjosh.de/ncDocConverter/pkg/logger"
|
||||
"rpjosh.de/ncDocConverter/pkg/utils"
|
||||
)
|
||||
|
||||
type BsJob struct {
|
||||
job *models.BookStackJob
|
||||
ncUser *models.NextcloudUser
|
||||
|
||||
cacheCount int
|
||||
cacheBooks map[int]book
|
||||
cacheShelves []shelf
|
||||
// If the cache should be used in the current execution
|
||||
useCache bool
|
||||
}
|
||||
|
||||
type shelf struct {
|
||||
ID int `json:"id"`
|
||||
Name string `json:"name"`
|
||||
|
||||
// This has to be fetched separately
|
||||
books []int
|
||||
}
|
||||
type shelfDetails struct {
|
||||
ID int `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Tags []string `json:"tags"`
|
||||
Books []struct {
|
||||
ID int `json:"id"`
|
||||
Name string `json:"name"`
|
||||
} `json:"books"`
|
||||
}
|
||||
type shelves struct {
|
||||
Data []shelf `json:"data"`
|
||||
}
|
||||
|
||||
type book struct {
|
||||
ID int `json:"id"`
|
||||
Name string `json:"name"`
|
||||
|
||||
// This has to be calculated from the most recently modified page of the book
|
||||
lastModified time.Time
|
||||
// If the book should be skipped during conversion
|
||||
ignore bool
|
||||
|
||||
// If the book has been already converted
|
||||
converted bool
|
||||
}
|
||||
type books struct {
|
||||
Data []book `json:"data"`
|
||||
}
|
||||
type bookDetails struct {
|
||||
ID int `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Contents []struct {
|
||||
ID int `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
BookID int `json:"book_id"`
|
||||
ChapterID int `json:"chapter_id"`
|
||||
Draft bool `json:"draft"`
|
||||
Template bool `json:"template"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
URL string `json:"url"`
|
||||
Type string `json:"type"`
|
||||
} `json:"contents"`
|
||||
Tags []string `json:"tags"`
|
||||
}
|
||||
|
||||
func NewBsJob(job *models.BookStackJob, ncUser *models.NextcloudUser) *BsJob {
|
||||
bsJob := BsJob{
|
||||
job: job,
|
||||
ncUser: ncUser,
|
||||
}
|
||||
|
||||
return &bsJob
|
||||
}
|
||||
|
||||
func (job *BsJob) ExecuteJob() {
|
||||
// Get all existing files in the destination folder
|
||||
destination, err := nextcloud.SearchInDirectory(
|
||||
job.ncUser, job.job.DestinationDir,
|
||||
[]string{
|
||||
"text/html",
|
||||
"application/pdf",
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
logger.Error("Failed to get files in destination directory '%s': %s", job.job.DestinationDir, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Make a map with path as index
|
||||
destinationMap := make(map[string]ncFiles)
|
||||
|
||||
preCount := len("/remote.php/dav/files/" + job.ncUser.Username + "/")
|
||||
for _, file := range destination.Response {
|
||||
href, _ := url.QueryUnescape(file.Href)
|
||||
path := href[preCount:]
|
||||
var extension = filepath.Ext(path)
|
||||
var name = path[0 : len(path)-len(extension)][len(job.job.DestinationDir):]
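// Example with values from the sample configuration (the book title is made up):
// href "/remote.php/dav/files/exchange/ebooks/wiki/Linux/MyBook.html" yields
// path "ebooks/wiki/Linux/MyBook.html" and name "Linux/MyBook"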
|
||||
|
||||
destinationMap[name] = ncFiles{
|
||||
extension: extension,
|
||||
path: path,
|
||||
lastModified: file.GetLastModified(),
|
||||
}
|
||||
}
|
||||
|
||||
// Check for cache
|
||||
job.cache()
|
||||
|
||||
// Get all shelves
|
||||
shelves, err := job.getShelves()
|
||||
if err != nil {
|
||||
logger.Error("Failed to get shelves: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Get all books
|
||||
books, err := job.getBooks()
|
||||
if err != nil {
|
||||
logger.Error("Failed to get books: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Index books by path
|
||||
indexedBooks := job.getIndexedBooks(shelves, books)
|
||||
|
||||
// Cache data
|
||||
if job.job.CacheCount > 0 && !job.useCache {
|
||||
job.cacheCount = job.job.CacheCount
|
||||
|
||||
job.cacheShelves = *shelves
|
||||
job.cacheBooks = utils.CopyMap(*books)
|
||||
}
|
||||
|
||||
// Now finally convert the books :)
|
||||
convertCount := 0
|
||||
var wg sync.WaitGroup
|
||||
for i, b := range indexedBooks {
|
||||
// mark as converted
|
||||
indexedBooks[i].converted = true
|
||||
(*books)[b.ID] = *indexedBooks[i]
|
||||
|
||||
// check if it has to be converted again (updated) or for the first time
|
||||
des, exists := destinationMap[i]
|
||||
|
||||
if (!exists || b.lastModified.After(des.lastModified)) && !b.ignore {
|
||||
wg.Add(1)
|
||||
convertCount++
|
||||
go func(book book, path string) {
|
||||
defer wg.Done()
|
||||
job.convertBook(book, path)
|
||||
}(*b, i)
|
||||
} else if b.ignore {
|
||||
logger.Debug("Duplicate book name: %s", b.Name)
|
||||
}
|
||||
|
||||
// Ignore states that a book with a duplicate name exists → delete the orig also
|
||||
if !b.ignore {
|
||||
delete(destinationMap, i)
|
||||
}
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
// Convert remaining books
|
||||
if job.job.IncludeBooksWithoutShelve {
|
||||
for _, b := range *books {
|
||||
// check if it has to be converted again (updated) or for the first time
|
||||
des, exists := destinationMap[b.Name]
|
||||
|
||||
if !b.converted && !b.ignore && (!exists || b.lastModified.After(des.lastModified)) {
|
||||
wg.Add(1)
|
||||
convertCount++
|
||||
go func(book book, path string) {
|
||||
defer wg.Done()
|
||||
job.convertBook(book, path)
|
||||
}(b, b.Name)
|
||||
}
|
||||
delete(destinationMap, b.Name)
|
||||
}
|
||||
wg.Wait()
|
||||
}
|
||||
|
||||
// Delete the files which are not available anymore
|
||||
for _, dest := range destinationMap {
|
||||
err := nextcloud.DeleteFile(job.ncUser, dest.path)
|
||||
if err != nil {
|
||||
logger.Error(utils.FirstCharToUppercase(err.Error()))
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("Finished BookStack job \"%s\": %d books converted", job.job.JobName, convertCount)
|
||||
}
|
||||
|
||||
// Checks and initializes the cache
|
||||
func (job *BsJob) cache() {
|
||||
if job.job.CacheCount > 0 {
|
||||
job.cacheCount--
|
||||
if job.cacheCount < 0 {
|
||||
job.useCache = false
|
||||
} else {
|
||||
job.useCache = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Returns the relative path under which the book is saved in Nextcloud
|
||||
func (job *BsJob) getPath(bookName string, shelfName string) string {
|
||||
if job.job.KeepStructure {
|
||||
return shelfName + "/" + bookName
|
||||
} else {
|
||||
return bookName
|
||||
}
|
||||
}
|
||||
|
||||
// Gets all shelves
|
||||
func (job *BsJob) getShelves() (*[]shelf, error) {
|
||||
if job.useCache {
|
||||
return &job.cacheShelves, nil
|
||||
}
|
||||
|
||||
client := http.Client{Timeout: 10 * time.Second}
|
||||
|
||||
req := job.getRequest(http.MethodGet, "shelves", nil)
|
||||
|
||||
// Add shelf filter
|
||||
q := req.URL.Query()
|
||||
for _, j := range job.job.Shelves {
|
||||
q.Add("filter[name:eq]", j)
|
||||
}
|
||||
req.URL.RawQuery = q.Encode()
|
||||
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
if res.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("expected status code 200, got %d", res.StatusCode)
|
||||
}
|
||||
|
||||
rtc := shelves{}
|
||||
if err = json.NewDecoder(res.Body).Decode(&rtc); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode response: %s", err)
|
||||
}
|
||||
|
||||
if job.job.ShelvesRegex != "" {
|
||||
reg, err := regexp.Compile(job.job.ShelvesRegex)
if err != nil {
// An invalid regex is fatal
logger.Fatal("Failed to parse the regex '%s': %s", job.job.ShelvesRegex, err)
}
|
||||
|
||||
rtc2 := shelves{}
|
||||
|
||||
for i, shelve := range rtc.Data {
|
||||
if reg.Match([]byte(shelve.Name)) {
|
||||
rtc2.Data = append(rtc2.Data, rtc.Data[i])
|
||||
} else {
|
||||
logger.Debug("Ignoring shelve %s", shelve.Name)
|
||||
}
|
||||
}
|
||||
|
||||
rtc = rtc2
|
||||
}
|
||||
|
||||
return &rtc.Data, nil
|
||||
}
|
||||
|
||||
// Returns the IDs of the books which belong to the shelf
|
||||
func (job *BsJob) getBooksInShelve(id int) ([]int, error) {
|
||||
client := http.Client{Timeout: 10 * time.Second}
|
||||
req := job.getRequest(http.MethodGet, "shelves/"+fmt.Sprintf("%d", id), nil)
|
||||
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
if res.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("expected status code 200, got %d", res.StatusCode)
|
||||
}
|
||||
|
||||
shelfDetails := shelfDetails{}
|
||||
if err = json.NewDecoder(res.Body).Decode(&shelfDetails); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode response: %s", err)
|
||||
}
|
||||
|
||||
rtc := make([]int, len(shelfDetails.Books))
|
||||
for i, details := range shelfDetails.Books {
|
||||
rtc[i] = details.ID
|
||||
}
|
||||
|
||||
return rtc, nil
|
||||
}
|
||||
|
||||
// Indexes the books by the relative path
|
||||
func (job *BsJob) getIndexedBooks(shelves *[]shelf, books *map[int]book) map[string]*book {
|
||||
// Now it has to be checked which book belongs to which shelf.
// When using the cache this was already done
|
||||
if !job.useCache {
|
||||
var wg sync.WaitGroup
|
||||
for i, shelv := range *shelves {
|
||||
wg.Add(1)
|
||||
|
||||
go func(shelf shelf, index int) {
|
||||
defer wg.Done()
|
||||
|
||||
ids, err := job.getBooksInShelve(shelf.ID)
|
||||
if err != nil {
|
||||
logger.Error("Failed to get shelf details: %s", err)
|
||||
} else {
|
||||
b := make([]int, 0)
|
||||
|
||||
for _, id := range ids {
|
||||
// Check if book should be excluded → it is not contained in the book map
|
||||
book, exists := (*books)[id]
|
||||
if exists {
|
||||
b = append(b, book.ID)
|
||||
}
|
||||
}
|
||||
|
||||
(*shelves)[index].books = b
|
||||
}
|
||||
}(shelv, i)
|
||||
}
|
||||
wg.Wait()
|
||||
}
|
||||
|
||||
// Two books can have the same name. This would lead to conflicts
// if they are in the same shelf / folder.
// In such a case the ID of the book is appended to the name ("bookName_123").
// Because of that, a map indexed by the path is built first and the files are converted AFTERWARDS
|
||||
indexedBooks := make(map[string]*book)
|
||||
for _, shelf := range *shelves {
|
||||
for _, bookId := range shelf.books {
|
||||
b := (*books)[bookId]
|
||||
bookPath := job.getPath(b.Name, shelf.Name)
|
||||
existingBook, doesExists := indexedBooks[bookPath]
|
||||
|
||||
if doesExists {
|
||||
// The book path will be appended
|
||||
newBookPath := fmt.Sprintf("%s_%d", bookPath, b.ID)
|
||||
indexedBooks[newBookPath] = &b
|
||||
|
||||
// Also add the other book with the ID
|
||||
otherNewBookPath := fmt.Sprintf("%s_%d", bookPath, existingBook.ID)
|
||||
indexedBooks[otherNewBookPath] = existingBook
|
||||
|
||||
// The original book won't be removed because otherwise a third book with the same
|
||||
// name will be inserted using its real name.
|
||||
// But because this is a pointer, a copy is needed
|
||||
var existingBookCopy book
|
||||
utils.Copy(existingBook, &existingBookCopy)
|
||||
existingBookCopy.ignore = true
|
||||
indexedBooks[bookPath] = &existingBookCopy
|
||||
} else {
|
||||
indexedBooks[bookPath] = &b
|
||||
}
|
||||
}
|
||||
|
||||
// If the structure should be kept, a folder for every shelf has to be created
|
||||
if job.job.KeepStructure && !job.useCache {
|
||||
nextcloud.CreateFoldersRecursively(job.ncUser, job.job.DestinationDir+shelf.Name+"/")
|
||||
}
|
||||
}
|
||||
|
||||
return indexedBooks
|
||||
}
|
||||
|
||||
// Gets all books and returns a map indexed by the ID of the book
|
||||
func (job *BsJob) getBooks() (*map[int]book, error) {
|
||||
if job.useCache {
|
||||
books := utils.CopyMap(job.cacheBooks)
|
||||
|
||||
// The last modified date has to be updated even when using the cache
|
||||
var wg sync.WaitGroup
|
||||
var mut = &sync.Mutex{}
|
||||
for i, b := range books {
|
||||
wg.Add(1)
|
||||
|
||||
go func(book book, index int) {
|
||||
defer wg.Done()
|
||||
lastModified, err := job.getLastModifiedOfBook(book.ID)
|
||||
if err != nil {
|
||||
logger.Warning("Failed to get last modified date of book %s (%d) - using old date: %s", book.Name, book.ID, err)
|
||||
return
|
||||
}
|
||||
|
||||
book.lastModified = *lastModified
|
||||
|
||||
mut.Lock()
|
||||
books[index] = book
|
||||
mut.Unlock()
|
||||
}(b, i)
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
return &books, nil
|
||||
}
|
||||
|
||||
client := http.Client{Timeout: 10 * time.Second}
|
||||
req := job.getRequest(http.MethodGet, "books", nil)
|
||||
|
||||
// Add book filter
|
||||
q := req.URL.Query()
|
||||
for _, j := range job.job.Books {
|
||||
q.Add("filter[name:eq]", j)
|
||||
}
|
||||
req.URL.RawQuery = q.Encode()
|
||||
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
if res.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("expected status code 200, got %d", res.StatusCode)
|
||||
}
|
||||
|
||||
booksArray := books{}
|
||||
if err = json.NewDecoder(res.Body).Decode(&booksArray); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode response: %s", err)
|
||||
}
|
||||
|
||||
if job.job.BooksRegex != "" {
|
||||
reg, err := regexp.Compile(job.job.BooksRegex)
if err != nil {
// An invalid regex is fatal
logger.Fatal("Failed to parse the regex '%s': %s", job.job.BooksRegex, err)
}
|
||||
|
||||
booksArray2 := books{}
|
||||
|
||||
for i, book := range booksArray.Data {
|
||||
if reg.Match([]byte(book.Name)) {
|
||||
booksArray2.Data = append(booksArray2.Data, booksArray.Data[i])
|
||||
} else {
|
||||
logger.Debug("Ignoring shelve %s", book.Name)
|
||||
}
|
||||
}
|
||||
|
||||
booksArray = booksArray2
|
||||
}
|
||||
|
||||
// Create indexed map
|
||||
rtc := make(map[int]book)
|
||||
var wg sync.WaitGroup
|
||||
var mut = &sync.Mutex{}
|
||||
for _, b := range booksArray.Data {
|
||||
wg.Add(1)
|
||||
|
||||
go func(b book) {
|
||||
defer wg.Done()
|
||||
lastModified, err := job.getLastModifiedOfBook(b.ID)
|
||||
if err != nil {
|
||||
logger.Warning("Failed to get last modified date of book %s (%d) - skipping: %s", b.Name, b.ID, err)
|
||||
return
|
||||
}
|
||||
|
||||
if lastModified.Unix() == 0 {
|
||||
logger.Info("Skipping book %s (%d) because of no content", b.Name, b.ID)
|
||||
return
|
||||
}
|
||||
|
||||
mut.Lock()
|
||||
rtc[b.ID] = book{
|
||||
ID: b.ID,
|
||||
Name: b.Name,
|
||||
lastModified: *lastModified,
|
||||
}
|
||||
mut.Unlock()
|
||||
}(b)
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
return &rtc, nil
|
||||
}
|
||||
|
||||
// Returns the last modified time of a book
|
||||
func (job *BsJob) getLastModifiedOfBook(id int) (*time.Time, error) {
|
||||
client := http.Client{Timeout: 10 * time.Second}
|
||||
req := job.getRequest(http.MethodGet, "books/"+fmt.Sprintf("%d", id), nil)
|
||||
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
if res.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("expected status code 200, got %d", res.StatusCode)
|
||||
}
|
||||
|
||||
bd := bookDetails{}
|
||||
if err = json.NewDecoder(res.Body).Decode(&bd); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode response: %s", err)
|
||||
}
|
||||
|
||||
lastMod := time.Unix(0, 0)
|
||||
for i, content := range bd.Contents {
|
||||
if content.Template || content.Draft {
|
||||
continue
|
||||
}
|
||||
|
||||
if content.UpdatedAt.After(lastMod) {
|
||||
lastMod = bd.Contents[i].UpdatedAt
|
||||
}
|
||||
}
|
||||
|
||||
return &lastMod, nil
|
||||
}
|
||||
|
||||
// Returns a new request to the bookStack API.
|
||||
// The path beginning AFTER /api/ should be given (e.g.: shelves)
|
||||
func (job *BsJob) getRequest(method string, path string, body io.Reader) *http.Request {
|
||||
req, err := http.NewRequest(method, job.ncUser.BookStack.URL+"/api/"+path, body)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
req.Header.Set("Authorization", "Token "+job.ncUser.BookStack.Token)
|
||||
|
||||
return req
|
||||
}
|
||||
|
||||
// Converts the given book and uploads it to nextcloud.
|
||||
// The path is expected to be relative to the job's destination directory and must
// not contain a file extension
|
||||
func (job *BsJob) convertBook(book book, path string) {
|
||||
fileExtension, url := job.getFileExtension()
|
||||
|
||||
client := http.Client{Timeout: 10 * time.Second}
|
||||
req := job.getRequest(http.MethodGet, fmt.Sprintf("books/%d/export/%s", book.ID, url), nil)
|
||||
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
logger.Error("Failed to convert book: %s", err)
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
if res.StatusCode != 200 {
|
||||
logger.Error("Failed to convert book: expected status code 200, got %d", res.StatusCode)
|
||||
return
|
||||
}
|
||||
|
||||
err = nextcloud.UploadFile(job.ncUser, job.job.DestinationDir+path+fileExtension, res.Body)
|
||||
if err != nil {
|
||||
logger.Error("Failed to upload book to nextcloud: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (job *BsJob) getFileExtension() (fileExtension string, url string) {
|
||||
switch strings.ToLower(string(job.job.Format)) {
|
||||
case "html":
|
||||
{
|
||||
fileExtension = ".html"
|
||||
url = "html"
|
||||
}
|
||||
case "pdf":
|
||||
{
|
||||
fileExtension = ".pdf"
|
||||
url = "pdf"
|
||||
}
|
||||
default:
|
||||
{
|
||||
logger.Fatal("Invalid format given: '%s'. Expected 'html' or 'pdf'", job.job.Format)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
|
@ -1,30 +1,114 @@
|
|||
package ncworker
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/go-co-op/gocron"
|
||||
"rpjosh.de/ncDocConverter/internal/models"
|
||||
"rpjosh.de/ncDocConverter/pkg/logger"
|
||||
)
|
||||
|
||||
type NcConvertScheduler struct {
|
||||
users *models.NcConvertUsers
|
||||
users *models.NcConvertUsers
|
||||
config *models.WebConfig
|
||||
|
||||
scheduler *gocron.Scheduler
|
||||
}
|
||||
|
||||
func NewScheduler(users *models.NcConvertUsers) *NcConvertScheduler {
|
||||
scheduler := NcConvertScheduler {
|
||||
users: users,
|
||||
func NewScheduler(users *models.NcConvertUsers, config *models.WebConfig) *NcConvertScheduler {
|
||||
scheduler := NcConvertScheduler{
|
||||
users: users,
|
||||
config: config,
|
||||
scheduler: gocron.NewScheduler(time.Local),
|
||||
}
|
||||
// Don't reschedule a task if it's still running
|
||||
scheduler.scheduler.SingletonMode()
|
||||
scheduler.scheduler.StartAsync()
|
||||
|
||||
scheduler.ScheduleExecutions()
|
||||
if config.Server.OneShot {
|
||||
scheduler.ScheduleExecutionsOneShot()
|
||||
} else {
|
||||
scheduler.ScheduleExecutions()
|
||||
|
||||
fmt.Println("Started in schedule mode.\nType \"exit\" to leave or \"execute\" to execute all jobs")
|
||||
// Endless loop
|
||||
for {
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
text, err := reader.ReadString('\n')
|
||||
if err != nil {
|
||||
// No console input
|
||||
var wg sync.WaitGroup
|
||||
logger.Debug("No console available")
|
||||
wg.Add(1)
|
||||
wg.Wait()
|
||||
}
|
||||
|
||||
input := strings.Trim(strings.ToLower(text), "\n")
|
||||
if input == "exit" {
|
||||
break
|
||||
} else if input == "execute" {
|
||||
scheduler.scheduler.RunAll()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &scheduler
|
||||
}
|
||||
|
||||
func (scheduler NcConvertScheduler) ScheduleExecutions() {
|
||||
// Executes all jobs and exits the program afterwards
|
||||
func (scheduler NcConvertScheduler) ScheduleExecutionsOneShot() {
|
||||
for _, user := range scheduler.users.Users {
|
||||
|
||||
// Schedule Nextcloud jobs
|
||||
for _, job := range user.ConvertJobs {
|
||||
convJob := NewJob(&job, &user)
|
||||
convJob := NewNcJob(&job, &user)
|
||||
convJob.ExecuteJob()
|
||||
}
|
||||
|
||||
// Schedule BookStack jobs
|
||||
if user.BookStack.URL != "" {
|
||||
for _, job := range user.BookStack.Jobs {
|
||||
bsJob := NewBsJob(&job, &user)
|
||||
bsJob.ExecuteJob()
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
// Schedules all jobs with gocron
|
||||
func (s NcConvertScheduler) ScheduleExecutions() {
|
||||
for ui, user := range s.users.Users {
|
||||
|
||||
// Schedule Nextcloud jobs
|
||||
for i, job := range user.ConvertJobs {
|
||||
convJob := NewNcJob(&s.users.Users[ui].ConvertJobs[i], &s.users.Users[i])
|
||||
|
||||
_, err := s.scheduler.Cron(job.Execution).DoWithJobDetails(s.executeJob, convJob)
|
||||
if err != nil {
|
||||
logger.Fatal("Failed to schedule office job '%s': %s", job.JobName, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Schedule BookStack jobs
|
||||
if user.BookStack.URL != "" {
|
||||
for i, job := range user.BookStack.Jobs {
|
||||
bsJob := NewBsJob(&s.users.Users[ui].BookStack.Jobs[i], &s.users.Users[i])
|
||||
|
||||
_, err := s.scheduler.Cron(job.Execution).DoWithJobDetails(s.executeJob, bsJob)
|
||||
if err != nil {
|
||||
logger.Fatal("Failed to schedule BookStack job '%s': %s", job.JobName, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s NcConvertScheduler) executeJob(job Job, scheduledJob gocron.Job) {
|
||||
job.ExecuteJob()
|
||||
}
|
||||
|
|
|
@ -0,0 +1,5 @@
package ncworker

type Job interface {
	ExecuteJob()
}
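This small interface is what lets the scheduler treat OnlyOffice and BookStack jobs uniformly: both job types implement ExecuteJob(). A self-contained sketch of the pattern with stand-in types (not the project's actual job structs):

package main

import "fmt"

// Minimal sketch of the Job interface pattern used in ncworker:
// any job type that implements ExecuteJob() can be scheduled the same way.
type Job interface {
	ExecuteJob()
}

type officeJob struct{ name string }    // stand-in for the OnlyOffice convert job
type bookStackJob struct{ name string } // stand-in for the BookStack job

func (j officeJob) ExecuteJob()    { fmt.Println("converting office docs:", j.name) }
func (j bookStackJob) ExecuteJob() { fmt.Println("exporting BookStack books:", j.name) }

func main() {
	jobs := []Job{officeJob{"Convert my books"}, bookStackJob{"Convert my favorite books"}}
	for _, job := range jobs {
		job.ExecuteJob()
	}
}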
|
@ -1,307 +0,0 @@
|
|||
package ncworker
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/xml"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"rpjosh.de/ncDocConverter/internal/models"
|
||||
"rpjosh.de/ncDocConverter/pkg/logger"
|
||||
"rpjosh.de/ncDocConverter/web"
|
||||
)
|
||||
|
||||
type convertJob struct {
|
||||
job *models.ConvertJob
|
||||
user *models.User
|
||||
}
|
||||
|
||||
type searchResult struct {
|
||||
XMLName xml.Name `xml:"multistatus"`
|
||||
Text string `xml:",chardata"`
|
||||
D string `xml:"d,attr"`
|
||||
S string `xml:"s,attr"`
|
||||
Oc string `xml:"oc,attr"`
|
||||
Nc string `xml:"nc,attr"`
|
||||
Response []struct {
|
||||
Text string `xml:",chardata"`
|
||||
Href string `xml:"href"`
|
||||
Propstat struct {
|
||||
Text string `xml:",chardata"`
|
||||
Prop struct {
|
||||
Text string `xml:",chardata"`
|
||||
Getcontenttype string `xml:"getcontenttype"`
|
||||
Getlastmodified string `xml:"getlastmodified"`
|
||||
Size string `xml:"size"`
|
||||
Fileid int `xml:"fileid"`
|
||||
} `xml:"prop"`
|
||||
Status string `xml:"status"`
|
||||
} `xml:"propstat"`
|
||||
} `xml:"response"`
|
||||
}
|
||||
|
||||
type ncFiles struct {
|
||||
extension string
|
||||
path string
|
||||
lastModified time.Time
|
||||
contentType string
|
||||
size int
|
||||
fileid int
|
||||
}
|
||||
|
||||
type searchTemplateData struct {
|
||||
Username string
|
||||
Directory string
|
||||
ContentType []string
|
||||
}
|
||||
|
||||
func NewJob(job *models.ConvertJob, user *models.User) *convertJob {
|
||||
convJob := &convertJob{
|
||||
job: job,
|
||||
user: user,
|
||||
|
||||
}
|
||||
|
||||
return convJob
|
||||
}
|
||||
|
||||
func (job *convertJob) ExecuteJob() {
|
||||
source := job.searchInDirectory(
|
||||
job.job.SourceDir,
|
||||
[]string {
|
||||
"application/vnd.openxmlformats-officedocument.wordprocessingml.document",
|
||||
"application/msword",
|
||||
},
|
||||
)
|
||||
destination := job.searchInDirectory(
|
||||
job.job.DestinationDir,
|
||||
[]string {
|
||||
"application/pdf",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
preCount := len("/remote.php/dav/files/" + job.user.Username + "/")
|
||||
// store the files in a map
|
||||
sourceMap := make(map[string]ncFiles)
|
||||
destinationMap := make(map[string]ncFiles)
|
||||
|
||||
for _, file := range source.Response {
|
||||
path := file.Href[preCount:]
|
||||
var extension = filepath.Ext(path)
|
||||
var name = path[0:len(path)-len(extension)][len(job.job.SourceDir):]
|
||||
// Time format: Fri, 23 Sep 2022 05:46:31 GMT
|
||||
time, err := time.Parse("Mon, 02 Jan 2006 15:04:05 GMT", file.Propstat.Prop.Getlastmodified)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
size, err := strconv.Atoi(file.Propstat.Prop.Size)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
sourceMap[name] = ncFiles{
|
||||
extension: extension,
|
||||
path: path,
|
||||
lastModified: time,
|
||||
size: size,
|
||||
contentType: file.Propstat.Prop.Getcontenttype,
|
||||
fileid: file.Propstat.Prop.Fileid,
|
||||
}
|
||||
}
|
||||
|
||||
for _, file := range destination.Response {
|
||||
path := file.Href[preCount:]
|
||||
var extension = filepath.Ext(path)
|
||||
var name = path[0:len(path)-len(extension)][len(job.job.DestinationDir):]
|
||||
|
||||
time, err := time.Parse("Mon, 02 Jan 2006 15:04:05 GMT", file.Propstat.Prop.Getlastmodified)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
size, err := strconv.Atoi(file.Propstat.Prop.Size)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
destinationMap[name] = ncFiles{
|
||||
extension: extension,
|
||||
path: path,
|
||||
lastModified: time,
|
||||
size: size,
|
||||
contentType: file.Propstat.Prop.Getcontenttype,
|
||||
fileid: file.Propstat.Prop.Fileid,
|
||||
}
|
||||
}
|
||||
|
||||
for index, source := range sourceMap {
|
||||
// check if the file exists in the destination map
|
||||
if dest, exists := destinationMap[index]; exists {
|
||||
// compare timestamp and size
|
||||
if dest.lastModified.Before(source.lastModified) {
|
||||
job.convertFile(source.path, source.fileid, dest.path)
|
||||
}
|
||||
delete(destinationMap, index)
|
||||
} else {
|
||||
job.convertFile(
|
||||
source.path, source.fileid, job.getDestinationDir(source.path),
|
||||
)
|
||||
delete(destinationMap, index)
|
||||
}
|
||||
}
|
||||
|
||||
// delete the files which are not available anymore
|
||||
for _, dest := range destinationMap {
|
||||
job.deleteFile(dest.path)
|
||||
}
|
||||
}
|
||||
|
||||
func (job *convertJob) getDestinationDir(sourceFile string) string {
|
||||
sourceFile = sourceFile[len(job.job.SourceDir):]
|
||||
var extension = filepath.Ext(sourceFile)
|
||||
var name = sourceFile[0:len(sourceFile)-len(extension)]
|
||||
|
||||
return job.job.DestinationDir + name + ".pdf"
|
||||
}
|
||||
|
||||
func (job *convertJob) createFoldersRecursively(destinationFile string) {
|
||||
s := strings.Split(destinationFile, "/")
|
||||
folderTree := ""
|
||||
|
||||
logger.Debug("Creating directory for file '%s'", destinationFile)
|
||||
|
||||
// webdav doesn't have an function to create directories recursively
|
||||
for _, folder := range s[:len(s) - 1] {
|
||||
folderTree += folder + "/"
|
||||
|
||||
client := http.Client{Timeout: 5 * time.Second}
|
||||
req, err := http.NewRequest("MKCOL", job.user.NextcloudBaseUrl + "/remote.php/dav/files/" + job.user.Username + "/" + folderTree, nil)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
req.SetBasicAuth(job.user.Username, job.user.Password)
|
||||
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
if (res.StatusCode != 201 && res.StatusCode != 405) {
|
||||
|
||||
}
|
||||
// status code 201 or 405 (already existing)
|
||||
}
|
||||
}
|
||||
|
||||
func (job *convertJob) convertFile(sourceFile string, sourceid int, destinationFile string) {
|
||||
logger.Debug("Trying to convert %s (%d) to %s", sourceFile, sourceid, destinationFile)
|
||||
|
||||
job.createFoldersRecursively(destinationFile)
|
||||
|
||||
client := http.Client{Timeout: 10 * time.Second}
|
||||
req, err := http.NewRequest(http.MethodGet, job.user.NextcloudBaseUrl + "/apps/onlyoffice/downloadas", nil)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
req.SetBasicAuth(job.user.Username, job.user.Password)
|
||||
|
||||
q := req.URL.Query()
|
||||
q.Add("fileId", fmt.Sprint(sourceid))
|
||||
q.Add("toExtension", "pdf")
|
||||
req.URL.RawQuery = q.Encode()
|
||||
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
// Status Code 200
|
||||
defer res.Body.Close()
|
||||
|
||||
uploadClient := http.Client{Timeout: 10 * time.Second}
|
||||
uploadReq, err := http.NewRequest(http.MethodPut, job.user.NextcloudBaseUrl + "/remote.php/dav/files/" + job.user.Username + "/" + destinationFile, res.Body)
|
||||
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
uploadReq.SetBasicAuth(job.user.Username, job.user.Password)
|
||||
uploadReq.Header.Set("Content-Type", "application/binary")
|
||||
|
||||
res, err = uploadClient.Do(uploadReq)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
|
||||
if (res.StatusCode != 204 && res.StatusCode != 201) {
|
||||
logger.Error("Failed to create file %s (#%d)", destinationFile, res.StatusCode)
|
||||
}
|
||||
// Status Code 201
|
||||
res.Body.Close()
|
||||
}
|
||||
|
||||
func (job *convertJob) deleteFile(filePath string) {
|
||||
client := http.Client{Timeout: 5 * time.Second}
|
||||
|
||||
req, err := http.NewRequest(http.MethodDelete, job.user.NextcloudBaseUrl + "/remote.php/dav/files/" + job.user.Username + "/" + filePath, nil)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
req.SetBasicAuth(job.user.Username, job.user.Password)
|
||||
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
|
||||
if (res.StatusCode != 204) {
|
||||
logger.Error("Failed to delete file %s (%d)", filePath, res.StatusCode)
|
||||
}
|
||||
}
|
||||
|
||||
// Searches all doc files in the source directory
|
||||
func (job *convertJob) searchInDirectory(directory string, contentType []string) *searchResult {
|
||||
client := http.Client{Timeout: 5 * time.Second}
|
||||
|
||||
template, err := template.ParseFS(web.ApiTemplateFiles, "apitemplate/ncsearch.tmpl.xml")
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
templateData := searchTemplateData{
|
||||
Username: job.user.Username,
|
||||
Directory: directory,
|
||||
ContentType: contentType,
|
||||
}
|
||||
if err = template.Execute(&buf, templateData); err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
// Status code 207
|
||||
req, err := http.NewRequest("SEARCH", job.user.NextcloudBaseUrl + "/remote.php/dav/", &buf)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
req.SetBasicAuth(job.user.Username, job.user.Password)
|
||||
req.Header.Set("Content-Type", "application/xml")
|
||||
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
|
||||
defer res.Body.Close()
|
||||
|
||||
resBody, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
|
||||
fmt.Print(res.StatusCode)
|
||||
var result searchResult
|
||||
if err = xml.Unmarshal(resBody, &result); err != nil {
|
||||
logger.Error("%s", err)
|
||||
}
|
||||
|
||||
return &result
|
||||
}
|
|
@ -0,0 +1,199 @@
package ncworker

import (
	"fmt"
	"net/http"
	"net/url"
	"path/filepath"
	"strconv"
	"time"

	"rpjosh.de/ncDocConverter/internal/models"
	"rpjosh.de/ncDocConverter/internal/nextcloud"
	"rpjosh.de/ncDocConverter/pkg/logger"
	"rpjosh.de/ncDocConverter/pkg/utils"
)

type convertJob struct {
	job    *models.NcConvertJob
	ncUser *models.NextcloudUser
}

type ncFiles struct {
	extension    string
	path         string
	lastModified time.Time
	contentType  string
	size         int
	fileid       int
}

func NewNcJob(job *models.NcConvertJob, ncUser *models.NextcloudUser) *convertJob {
	convJob := &convertJob{
		job:    job,
		ncUser: ncUser,
	}

	return convJob
}

func (job *convertJob) ExecuteJob() {

	// Get existing directory contents
	source, err := nextcloud.SearchInDirectory(
		job.ncUser,
		job.job.SourceDir,
		[]string{
			"application/vnd.openxmlformats-officedocument.wordprocessingml.document",
			"application/msword",
		},
	)
	if err != nil {
		logger.Error("Failed to get files in source directory '%s': %s", job.job.SourceDir, err)
		return
	}

	destination, err := nextcloud.SearchInDirectory(
		job.ncUser,
		job.job.DestinationDir,
		[]string{
			"application/pdf",
		},
	)
	if err != nil {
		logger.Error("Failed to get files in destination directory '%s': %s", job.job.DestinationDir, err)
		return
	}

	preCount := len("/remote.php/dav/files/" + job.ncUser.Username + "/")
	// Store the files in a map
	sourceMap := make(map[string]ncFiles)
	destinationMap := make(map[string]ncFiles)

	for _, file := range source.Response {
		href, _ := url.QueryUnescape(file.Href)
		path := href[preCount:]
		var extension = filepath.Ext(path)
		var name = path[0 : len(path)-len(extension)][len(job.job.SourceDir):]
		time := file.GetLastModified()
		size, err := strconv.Atoi(file.Propstat.Prop.Size)
		if err != nil {
			logger.Error("%s", err)
		}
		sourceMap[name] = ncFiles{
			extension:    extension,
			path:         path,
			lastModified: time,
			size:         size,
			contentType:  file.Propstat.Prop.Getcontenttype,
			fileid:       file.Propstat.Prop.Fileid,
		}
	}

	for _, file := range destination.Response {
		href, _ := url.QueryUnescape(file.Href)
		path := href[preCount:]
		var extension = filepath.Ext(path)
		var name = path[0 : len(path)-len(extension)][len(job.job.DestinationDir):]

		time, err := time.Parse("Mon, 02 Jan 2006 15:04:05 GMT", file.Propstat.Prop.Getlastmodified)
		if err != nil {
			logger.Error("%s", err)
		}
		size, err := strconv.Atoi(file.Propstat.Prop.Size)
		if err != nil {
			logger.Error("%s", err)
		}
		destinationMap[name] = ncFiles{
			extension:    extension,
			path:         path,
			lastModified: time,
			size:         size,
			contentType:  file.Propstat.Prop.Getcontenttype,
			fileid:       file.Propstat.Prop.Fileid,
		}
	}

	convertCount := 0
	for index, source := range sourceMap {
		// check if the file exists in the destination map
		if dest, exists := destinationMap[index]; exists {
			// compare timestamp and size
			if dest.lastModified.Before(source.lastModified) {
				job.convertFile(source.path, source.fileid, dest.path)
				convertCount++
			}
			delete(destinationMap, index)
		} else {
			job.convertFile(
				source.path, source.fileid, job.getDestinationDir(source.path),
			)
			convertCount++
			delete(destinationMap, index)
		}
	}

	// Delete the files which are not available anymore
	for _, dest := range destinationMap {
		err := nextcloud.DeleteFile(job.ncUser, dest.path)
		if err != nil {
			logger.Error(utils.FirstCharToUppercase(err.Error()))
		}
	}

	logger.Info("Finished Nextcloud job \"%s\": %d documents converted", job.job.JobName, convertCount)
}

func (job *convertJob) getDestinationDir(sourceFile string) string {
	sourceFile = sourceFile[len(job.job.SourceDir):]
	var extension = filepath.Ext(sourceFile)
	var name = sourceFile[0 : len(sourceFile)-len(extension)]

	return job.job.DestinationDir + name + ".pdf"
}

// Converts the source file to the destination file utilizing the onlyoffice convert api
func (job *convertJob) convertFile(sourceFile string, sourceid int, destinationFile string) {
	logger.Debug("Trying to convert %s (%d) to %s", sourceFile, sourceid, destinationFile)

	nextcloud.CreateFoldersRecursively(job.ncUser, destinationFile)

	client := http.Client{Timeout: 10 * time.Second}
	req, err := http.NewRequest(http.MethodGet, job.ncUser.NextcloudBaseUrl+"/apps/onlyoffice/downloadas", nil)
	if err != nil {
		logger.Error("%s", err)
	}
	req.SetBasicAuth(job.ncUser.Username, job.ncUser.Password)

	q := req.URL.Query()
	q.Add("fileId", fmt.Sprint(sourceid))
	q.Add("toExtension", "pdf")
	req.URL.RawQuery = q.Encode()

	res, err := client.Do(req)
	if err != nil {
		logger.Error("%s", err)
	}
	// Status Code 200
	defer res.Body.Close()

	uploadClient := http.Client{Timeout: 10 * time.Second}
	uploadReq, err := http.NewRequest(http.MethodPut, job.ncUser.NextcloudBaseUrl+"/remote.php/dav/files/"+job.ncUser.Username+"/"+destinationFile, res.Body)

	if err != nil {
		logger.Error("%s", err)
	}
	uploadReq.SetBasicAuth(job.ncUser.Username, job.ncUser.Password)
	uploadReq.Header.Set("Content-Type", "application/binary")

	res, err = uploadClient.Do(uploadReq)
	if err != nil {
		logger.Error("%s", err)
	}

	if res.StatusCode != 204 && res.StatusCode != 201 {
		logger.Error("Failed to create file %s (#%d)", destinationFile, res.StatusCode)
	}
	// Status Code 201
	res.Body.Close()
}
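For orientation, a minimal sketch of how a single conversion job could be driven by hand. The field names are taken from how the code above uses them, not from a documented API, and the concrete values are placeholders; in the real program the scheduler builds the jobs from the configuration file.

	user := &models.NextcloudUser{
		NextcloudBaseUrl: "https://cloud.example.com",
		Username:         "docbot",
		Password:         "app-password",
	}
	job := &models.NcConvertJob{
		JobName:        "Manuals to PDF",
		SourceDir:      "Documents/Manuals/",
		DestinationDir: "Documents/Manuals-PDF/",
	}
	// NewNcJob only bundles the job with the user; ExecuteJob performs the
	// search, convert, upload and cleanup pass shown above.
	ncworker.NewNcJob(job, user).ExecuteJob()
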
@ -0,0 +1,178 @@
package nextcloud

import (
	"bytes"
	"encoding/xml"
	"fmt"
	"io"
	"net/http"
	"strings"
	"text/template"
	"time"

	"rpjosh.de/ncDocConverter/internal/models"
	"rpjosh.de/ncDocConverter/pkg/logger"
	"rpjosh.de/ncDocConverter/web"
)

type searchTemplateData struct {
	Username    string
	Directory   string
	ContentType []string
}

type searchResult struct {
	XMLName  xml.Name               `xml:"multistatus"`
	Text     string                 `xml:",chardata"`
	D        string                 `xml:"d,attr"`
	S        string                 `xml:"s,attr"`
	Oc       string                 `xml:"oc,attr"`
	Nc       string                 `xml:"nc,attr"`
	Response []searchResultResponse `xml:"response"`
}
type searchResultResponse struct {
	Text     string `xml:",chardata"`
	Href     string `xml:"href"`
	Propstat struct {
		Text string `xml:",chardata"`
		Prop struct {
			Text            string `xml:",chardata"`
			Getcontenttype  string `xml:"getcontenttype"`
			Getlastmodified string `xml:"getlastmodified"`
			Size            string `xml:"size"`
			Fileid          int    `xml:"fileid"`
		} `xml:"prop"`
		Status string `xml:"status"`
	} `xml:"propstat"`
}

func (r *searchResultResponse) GetLastModified() time.Time {
	// Time format: Fri, 23 Sep 2022 05:46:31 GMT
	rtc, err := time.Parse("Mon, 02 Jan 2006 15:04:05 GMT", r.Propstat.Prop.Getlastmodified)
	if err != nil {
		logger.Warning("%s", err)
		rtc = time.Unix(0, 1)
	}

	return rtc
}

// Returns a new request to the Nextcloud API.
// The path beginning AFTER /dav/ should be given (e.g.: myUser/folder/file.txt)
func getRequest(method string, path string, body io.Reader, ncUser *models.NextcloudUser) *http.Request {
	req, err := http.NewRequest(method, ncUser.NextcloudBaseUrl+"/remote.php/dav/"+path, body)
	if err != nil {
		logger.Error("%s", err)
	}
	req.SetBasicAuth(ncUser.Username, ncUser.Password)

	return req
}

// Searches for all files of the given content types starting in the given directory.
func SearchInDirectory(ncUser *models.NextcloudUser, directory string, contentType []string) (*searchResult, error) {
	client := http.Client{Timeout: 5 * time.Second}

	template, err := template.ParseFS(web.ApiTemplateFiles, "apitemplate/ncsearch.tmpl.xml")
	if err != nil {
		return nil, err
	}
	var buf bytes.Buffer
	templateData := searchTemplateData{
		Username:    ncUser.Username,
		Directory:   directory,
		ContentType: contentType,
	}
	if err = template.Execute(&buf, templateData); err != nil {
		return nil, err
	}

	// Status code 207
	req := getRequest("SEARCH", "", &buf, ncUser)
	req.Header.Set("Content-Type", "application/xml")

	res, err := client.Do(req)
	if err != nil {
		return nil, err
	}

	// Read the body before checking the status code so it can be included in the error message
	defer res.Body.Close()

	resBody, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}

	if res.StatusCode != 207 {
		return nil, fmt.Errorf("status code %d: %s", res.StatusCode, resBody)
	}

	var result searchResult
	if err = xml.Unmarshal(resBody, &result); err != nil {
		return nil, err
	}

	return &result, nil
}

// Deletes a file with the given path.
// The path has to start at the root level: Ebook/myFolder/file.txt
func DeleteFile(ncUser *models.NextcloudUser, filePath string) error {
	client := http.Client{Timeout: 5 * time.Second}

	req := getRequest(http.MethodDelete, "files/"+ncUser.Username+"/"+filePath, nil, ncUser)

	res, err := client.Do(req)
	if err != nil {
		logger.Error("%s", err)
	}

	if res.StatusCode != 204 {
		return fmt.Errorf("failed to delete file %s (%d)", filePath, res.StatusCode)
	}

	return nil
}

// Creates all directories required for the destination file recursively.
// The path should be relative to the root: ebook/folder1/folder2/file.txt
func CreateFoldersRecursively(ncUser *models.NextcloudUser, destinationFile string) {
	s := strings.Split(destinationFile, "/")
	folderTree := ""

	// WebDAV doesn't have a function to create directories recursively → iterate
	for _, folder := range s[:len(s)-1] {
		folderTree += folder + "/"

		client := http.Client{Timeout: 5 * time.Second}
		req := getRequest("MKCOL", "files/"+ncUser.Username+"/"+folderTree, nil, ncUser)

		res, err := client.Do(req)
		if err != nil {
			logger.Error("%s", err)
		}

		if res.StatusCode != 201 && res.StatusCode != 405 {
			logger.Error("Failed to create directories")
		}
	}
}

// Uploads a file to the Nextcloud server.
// It will be saved to the destination as a relative path to the Nextcloud root (ebook/file.txt).
func UploadFile(ncUser *models.NextcloudUser, destination string, content io.ReadCloser) error {
	client := http.Client{Timeout: 5 * time.Second}
	req := getRequest(http.MethodPut, "files/"+ncUser.Username+"/"+destination, content, ncUser)

	res, err := client.Do(req)
	if err != nil {
		return err
	}

	if res.StatusCode != 201 && res.StatusCode != 204 {
		return fmt.Errorf("expected status code 201 or 204 but got %d", res.StatusCode)
	}

	return nil
}
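A short usage sketch for the WebDAV helpers above, illustrative only: it mirrors a local file into Nextcloud. It assumes a *models.NextcloudUser named user and that "os" is imported; the target path is made up.

	f, err := os.Open("report.docx")
	if err != nil {
		logger.Error("%s", err)
	} else {
		defer f.Close()
		// Make sure the folder tree exists, then PUT the file via WebDAV
		nextcloud.CreateFoldersRecursively(user, "ebook/reports/report.docx")
		if err := nextcloud.UploadFile(user, "ebook/reports/report.docx", f); err != nil {
			logger.Error("%s", err)
		}
	}
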
@ -3,17 +3,18 @@ package logger
import (
	"fmt"
	"log"
	"os"
	"runtime"
	"strconv"
	"strings"
	"time"
	"os"
)

// Level of the log message
type Level uint8

const (
	LevelDebug Level = iota
	LevelDebug Level = iota
	LevelInfo
	LevelWarning
	LevelError
@ -21,23 +22,23 @@ const (
)

type Logger struct {
	PrintLevel Level
	LogLevel Level
	LogFilePath string
	PrintSource bool
	PrintLevel  Level
	LogLevel    Level
	LogFilePath string
	PrintSource bool

	consoleLogger *log.Logger
	consoleLoggerErr *log.Logger
	fileLogger *log.Logger
	logFile *os.File
	consoleLogger    *log.Logger
	consoleLoggerErr *log.Logger
	fileLogger       *log.Logger
	logFile          *os.File
}

var dLogger Logger

func init() {
	dLogger = Logger {
		PrintLevel: LevelDebug,
		LogLevel: LevelInfo,
	dLogger = Logger{
		PrintLevel:  LevelDebug,
		LogLevel:    LevelInfo,
		LogFilePath: "",
		PrintSource: false,
	}
@ -52,44 +53,49 @@ func (l Logger) Log(level Level, message string, parameters ...any) {

func (l Logger) log(level Level, message string, parameters ...any) {
	pc, file, line, ok := runtime.Caller(3)
	if (!ok) {
	if !ok {
		file = "#unknown"
		line = 0
	}

	// get the name of the level
	var levelName string
	switch (level) {
	case LevelDebug: levelName = "DEBUG"
	case LevelInfo: levelName = "INFO "
	case LevelWarning: levelName = "WARN "
	case LevelError: levelName = "ERROR"
	case LevelFatal: levelName = "FATAL"
	switch level {
	case LevelDebug:
		levelName = "DEBUG"
	case LevelInfo:
		levelName = "INFO "
	case LevelWarning:
		levelName = "WARN "
	case LevelError:
		levelName = "ERROR"
	case LevelFatal:
		levelName = "FATAL"
	}

	if (levelName == "") {
	if levelName == "" {
		message = fmt.Sprintf("Invalid level value given: %d. Original message: ", level) + message
		levelName = "WARN "
		level = LevelWarning
	}

	printMessage := "[" + levelName + "] " + time.Now().UTC().Format("2006-01-02 03:04:05") +
	printMessage := "[" + levelName + "] " + time.Now().Local().Format("2006-01-02 15:04:05") +
		getSourceMessage(file, line, pc, l) +
		fmt.Sprintf(message, parameters...)

	if (l.LogLevel <= level && l.fileLogger != nil) {
	if l.LogLevel <= level && l.fileLogger != nil {
		l.fileLogger.Println(printMessage)
		l.logFile.Sync()

		if (level == LevelFatal) {
		if level == LevelFatal {
			l.CloseFile()
		}
	}

	if (l.PrintLevel <= level) {
		if (level == LevelError) {
	if l.PrintLevel <= level {
		if level == LevelError {
			l.consoleLoggerErr.Println(printMessage)
		} else if (level == LevelFatal) {
		} else if level == LevelFatal {
			l.consoleLoggerErr.Fatal(printMessage)
		} else {
			l.consoleLogger.Println(printMessage)
@ -98,8 +104,8 @@ func (l Logger) log(level Level, message string, parameters ...any) {

}

func getSourceMessage(file string, line int, pc uintptr, l Logger) (string) {
	if (!l.PrintSource) {
func getSourceMessage(file string, line int, pc uintptr, l Logger) string {
	if !l.PrintSource {
		return " - "
	}

@ -113,7 +119,7 @@ func (l *Logger) setup() {
	l.consoleLogger = log.New(os.Stdout, "", 0)
	l.consoleLoggerErr = log.New(os.Stderr, "", 0)

	if (l.LogFilePath != "") {
	if strings.TrimSpace(l.LogFilePath) != "" {
		file, err := os.OpenFile(l.LogFilePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
		if err == nil {
			l.fileLogger = log.New(file, "", 0)

@ -123,7 +129,7 @@ func (l *Logger) setup() {
		}
	} else {
		l.fileLogger = nil
		if (l.logFile != nil) {
		if l.logFile != nil {
			l.logFile.Close()
			l.logFile = nil
		}

@ -131,19 +137,18 @@ func (l *Logger) setup() {
}

func (l *Logger) CloseFile() {
	if (l.logFile != nil) {
	if l.logFile != nil {
		l.logFile.Close()
		l.logFile = nil
		l.fileLogger = nil
	}
}


func SetGlobalLogger(l *Logger) {
	dLogger = *l
	dLogger.setup()
}
func GetGlobalLogger() (*Logger) {
func GetGlobalLogger() *Logger {
	return &dLogger
}


@ -172,14 +177,20 @@ func CloseFile() {
// If an incorrect level name was given, a warning is logged and info will be returned
func GetLevelByName(levelName string) Level {
	levelName = strings.ToLower(levelName)
	switch (levelName) {
	case "debug": return LevelDebug
	case "info": return LevelInfo
	case "warn", "warning": return LevelWarning
	case "error": return LevelError
	case "panic", "fatal": return LevelFatal
	switch levelName {
	case "debug":
		return LevelDebug
	case "info":
		return LevelInfo
	case "warn", "warning":
		return LevelWarning
	case "error":
		return LevelError
	case "panic", "fatal":
		return LevelFatal

	default: {
	default:
		{
			Warning("Unable to parse the level name '%s'. Expected 'debug', 'info', 'warn', 'error' or 'fatal'", levelName)
			return LevelInfo
		}
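As a sketch of how the logger package above is meant to be configured at startup (the function and field names are the ones defined above; the concrete level names and file path are illustrative):

	l := logger.GetGlobalLogger()
	l.PrintLevel = logger.GetLevelByName("debug")   // console output
	l.LogLevel = logger.GetLevelByName("warning")   // file output
	l.LogFilePath = "/var/log/ncDocConverter.log"
	l.PrintSource = true
	logger.SetGlobalLogger(l)

	logger.Info("Logger reconfigured")
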
@ -0,0 +1,38 @@
package utils

import (
	"reflect"
	"unicode"
)

// Makes the first character of the given string uppercase
func FirstCharToUppercase(text string) string {
	a := []rune(text)
	a[0] = unicode.ToUpper(a[0])
	return string(a)
}

// Copies a map. The structs are also cloned
func CopyMap[T comparable, Val any](m map[T]Val) map[T]Val {
	cp := make(map[T]Val)
	for k, v := range m {
		var u Val
		Copy(&v, &u)
		cp[k] = u
	}

	return cp
}

// Copies a struct
func Copy(source interface{}, destin interface{}) {
	x := reflect.ValueOf(source)
	if reflect.ValueOf(destin).Kind() != reflect.Ptr {
		return
	}
	if x.Kind() == reflect.Ptr {
		reflect.ValueOf(destin).Elem().Set(x.Elem())
	} else {
		reflect.ValueOf(destin).Elem().Set(x)
	}
}
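A quick illustration of the copy helpers (the point type and the values are hypothetical, and the fmt and utils imports are assumed); it shows that the clone returned by CopyMap is independent of the source:

	type point struct{ X, Y int }

	src := map[string]point{"p": {X: 1, Y: 2}}
	dst := utils.CopyMap(src)
	dst["p"] = point{X: 3, Y: 4}
	// src["p"] is still {1 2}: CopyMap copies each value via Copy
	fmt.Println(src["p"], dst["p"])
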
@ -1,3 +1,3 @@
#!/bin/sh

./web/app/node_modules/.bin/nodemon --delay 10s -e go,html --ignore web/app/ --signal SIGTERM --exec go run ./cmd/ncDocConverth || exit 1
nodemon --delay 1s -e go,html,yaml --ignore web/app/ --signal SIGTERM --exec 'go run ./cmd/ncDocConverth || exit 1'
@ -1,24 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*

node_modules
dist
dist-ssr
*.local

# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
@ -1,13 +0,0 @@
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <link rel="icon" type="image/svg+xml" href="/vite.svg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Vite + React + TS</title>
  </head>
  <body>
    <div id="root"></div>
    <script type="module" src="/src/main.tsx"></script>
  </body>
</html>
File diff suppressed because it is too large
@ -1,25 +0,0 @@
{
  "name": "vite-number-conversion",
  "private": true,
  "version": "0.0.0",
  "type": "module",
  "scripts": {
    "dev": "vite",
    "build": "tsc && vite build",
    "preview": "vite preview"
  },
  "dependencies": {
    "antd": "^4.23.2",
    "axios": "^0.27.2",
    "react": "^18.2.0",
    "react-dom": "^18.2.0"
  },
  "devDependencies": {
    "@types/react": "^18.0.17",
    "@types/react-dom": "^18.0.6",
    "@vitejs/plugin-react": "^2.1.0",
    "nodemon": "^2.0.20",
    "typescript": "^4.6.4",
    "vite": "^3.1.0"
  }
}
@ -1,41 +0,0 @@
#root {
  max-width: 1280px;
  margin: 0 auto;
  padding: 2rem;
  text-align: center;
}

.logo {
  height: 7em;
  padding: 1.5em;
  will-change: filter;
}
.logo:hover {
  filter: drop-shadow(0 0 2em #646cffaa);
}
.logo.react:hover {
  filter: drop-shadow(0 0 2em #61dafbaa);
}

@keyframes logo-spin {
  from {
    transform: rotate(0deg);
  }
  to {
    transform: rotate(360deg);
  }
}

@media (prefers-reduced-motion: no-preference) {
  a:nth-of-type(2) .logo {
    animation: logo-spin infinite 20s linear;
  }
}

.card {
  padding: 2em;
}

.read-the-docs {
  color: #888;
}
@ -1,36 +0,0 @@
import { useState } from 'react'
import reactLogo from './assets/react.svg'
import viteLogo from './assets/vite.svg'
import './App.css'

function App() {
  const [count, setCount] = useState(0)

  const hi: string = "Servus"
  return (
    <div className="App">
      <div>
        <a href="https://vitejs.dev" target="_blank">
          <img src={viteLogo} className="logo" alt="Vite logo" />
        </a>
        <a href="https://reactjs.org" target="_blank">
          <img src={reactLogo} className="logo react" alt="React logo" />
        </a>
      </div>
      <h1>Vite + React</h1>
      <div className="card">
        <button onClick={() => setCount((count) => count + 1)}>
          count is {count}
        </button>
        <p>
          Edit <code>src/App.tsx</code> and save to test HMR
        </p>
      </div>
      <p className="read-the-docs">
        Click on the Vite and React logos to learn more
      </p>
    </div>
  )
}

export default App
@ -1,3 +0,0 @@
#HELLOIHRBOYSDA {
  background-color: aliceblue;
}
@ -1,12 +0,0 @@
import React from 'react'
import ReactDOM from 'react-dom/client'
import App from './App'
import './Sep.css'

ReactDOM.createRoot(document.getElementById('root') as HTMLElement).render(
  <React.StrictMode>
    <div id="HELLOIHRBOYSDA">

    </div>
  </React.StrictMode>
)
@ -1 +0,0 @@
(inline SVG logo asset, 4.0 KiB — markup omitted)
@ -1 +0,0 @@
(inline SVG logo asset, 1.5 KiB — markup omitted)
@ -1,70 +0,0 @@
:root {
  font-family: Inter, Avenir, Helvetica, Arial, sans-serif;
  font-size: 16px;
  line-height: 24px;
  font-weight: 400;

  color-scheme: light dark;
  color: rgba(255, 255, 255, 0.87);
  background-color: #242424;

  font-synthesis: none;
  text-rendering: optimizeLegibility;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
  -webkit-text-size-adjust: 100%;
}

a {
  font-weight: 500;
  color: #646cff;
  text-decoration: inherit;
}
a:hover {
  color: #535bf2;
}

body {
  margin: 0;
  display: flex;
  place-items: center;
  min-width: 320px;
  min-height: 100vh;
}

h1 {
  font-size: 3.2em;
  line-height: 1.1;
}

button {
  border-radius: 8px;
  border: 1px solid transparent;
  padding: 0.6em 1.2em;
  font-size: 1em;
  font-weight: 500;
  font-family: inherit;
  background-color: #1a1a1a;
  cursor: pointer;
  transition: border-color 0.25s;
}
button:hover {
  border-color: #646cff;
}
button:focus,
button:focus-visible {
  outline: 4px auto -webkit-focus-ring-color;
}

@media (prefers-color-scheme: light) {
  :root {
    color: #213547;
    background-color: #ffffff;
  }
  a:hover {
    color: #747bff;
  }
  button {
    background-color: #f9f9f9;
  }
}
@ -1,10 +0,0 @@
import React from 'react'
import ReactDOM from 'react-dom/client'
import App from './App'
import './index.css'

ReactDOM.createRoot(document.getElementById('root') as HTMLElement).render(
  <React.StrictMode>
    <App />
  </React.StrictMode>
)
@ -1 +0,0 @@
/// <reference types="vite/client" />
@ -1,21 +0,0 @@
{
  "compilerOptions": {
    "target": "ESNext",
    "useDefineForClassFields": true,
    "lib": ["DOM", "DOM.Iterable", "ESNext"],
    "allowJs": false,
    "skipLibCheck": true,
    "esModuleInterop": false,
    "allowSyntheticDefaultImports": true,
    "strict": true,
    "forceConsistentCasingInFileNames": true,
    "module": "ESNext",
    "moduleResolution": "Node",
    "resolveJsonModule": true,
    "isolatedModules": true,
    "noEmit": true,
    "jsx": "react-jsx"
  },
  "include": ["src"],
  "references": [{ "path": "./tsconfig.node.json" }]
}
@ -1,9 +0,0 @@
{
  "compilerOptions": {
    "composite": true,
    "module": "ESNext",
    "moduleResolution": "Node",
    "allowSyntheticDefaultImports": true
  },
  "include": ["vite.config.ts"]
}
@ -1,20 +0,0 @@
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'

// https://vitejs.dev/config/
export default defineConfig({
  plugins: [react()],
  build: {
    rollupOptions: {
      output: {
        entryFileNames: `assets/[name].js`,
        chunkFileNames: `assets/[name].js`,
        assetFileNames: `assets/[name].[ext]`
      },
      input: {
        main: "src/main.tsx",
        sep: "src/Sep.tsx",
      },
    }
  }
})
web/efs.go
@ -1,17 +1,8 @@
package web

import (
	"embed"
	"embed"
)

//go:embed "app/dist"
var FrontendFiles embed.FS

//go:embed "app/src"
var DevelopeFiles embed.FS

//go:embed "template"
var TemplateFiles embed.FS

//go:embed "apitemplate"
var ApiTemplateFiles embed.FS
@ -1,16 +0,0 @@
{{define "base"}}
<!doctype html>
<html lang='de'>
	<head>
		<meta charset='utf-8'>
		<title>Vite + React + Go</title>
		<!-- <base href="http://localhost:5173" > -->
		{{if not .ServerConfig.Development}}
			<script type="module" src='{{ getJSFile "jsx-runtime"}}'></script>
		{{end}}
	</head>
	<body>
		{{template "main" .}}
	</body>
</html>
{{end}}
@ -1,12 +0,0 @@
{{define "main"}}
<div id="root"></div>

{{template "vitejs" .}}

<script type="module" src='{{ getJSFile "main"}}'></script>
{{if not .ServerConfig.Development}}
	<link rel="stylesheet" href="assets/main.css">
{{end}}


{{end}}
@ -1,11 +0,0 @@
{{define "vitejs"}}
{{if .ServerConfig.Development}}
	<script type="module">
		import RefreshRuntime from "{{.ServerConfig.SourceServer}}@react-refresh"
		RefreshRuntime.injectIntoGlobalHook(window)
		window.$RefreshReg$ = () => {}
		window.$RefreshSig$ = () => (type) => type
		window.__vite_plugin_react_preamble_installed__ = true
	</script>
{{end}}
{{end}}