From 225ca409d36468a8a1452e03b9d5292c40aa0c23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?K=C3=A9vin=20Dunglas?= Date: Fri, 12 Dec 2025 14:29:18 +0100 Subject: [PATCH] feat: hot reload (#2031) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This patch brings hot reloading capabilities to PHP apps: in development, the browser will automatically refresh the page when any source file changes! It's similar to HMR in JavaScript. It is built on top of [the watcher mechanism](https://frankenphp.dev/docs/config/#watching-for-file-changes) and of the [Mercure](https://frankenphp.dev/docs/mercure/) integration. Each time a watched file is modified, a Mercure update is sent, giving the client the ability to reload the page, or part of the page (assets, images...). Here is an example implementation: ```caddyfile root ./public mercure { subscriber_jwt {env.MERCURE_SUBSCRIBER_JWT_KEY} anonymous } php_server { hot_reload } ``` ```php Test Hello ``` I plan to create a helper JS library to handle more advanced cases (reloading CSS, JS, etc), similar to [HotWire Spark](https://github.com/hotwired/spark). Be sure to attend my SymfonyCon talk to learn more! There is still room for improvement: - Provide an option to only trigger the update without reloading the worker for some files (e.g., images, JS, CSS...) - Support classic mode (currently, only the worker mode is supported) - Don't reload all workers when only the files used by one change However, this PR is working as-is and can be merged as a first step. This patch heavily refactors the watcher module. Maybe it will be possible to extract it as a standalone library at some point (would be useful to add a similar feature but not tied to PHP as a Caddy module). 
--------- Signed-off-by: Kévin Dunglas Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- caddy/admin_test.go | 3 +- caddy/app.go | 43 ++- caddy/caddy.go | 2 +- caddy/caddy_test.go | 2 +- caddy/config_test.go | 2 +- caddy/go.mod | 5 +- caddy/go.sum | 2 + caddy/hotreload-skip.go | 20 ++ caddy/hotreload.go | 111 ++++++++ caddy/hotreload_test.go | 88 +++++++ caddy/mercure-skip.go | 13 +- caddy/mercure.go | 22 +- caddy/module.go | 49 ++-- caddy/workerconfig.go | 13 +- frankenphp.go | 14 +- frankenphp_test.go | 3 +- go.mod | 3 +- go.sum | 2 + hotreload.go | 44 ++++ internal/fastabs/filepath_unix.go | 18 +- internal/state/state.go | 18 +- internal/watcher/cgo.go | 6 - internal/watcher/pattern.go | 218 +++++++++++++++ internal/watcher/pattern_test.go | 350 +++++++++++++++++++++++++ internal/watcher/watch_pattern.go | 178 ------------- internal/watcher/watch_pattern_test.go | 187 ------------- internal/watcher/watcher-skip.go | 17 -- internal/watcher/watcher.c | 26 -- internal/watcher/watcher.go | 245 +++++++++-------- internal/watcher/watcher.h | 6 - mercure-skip.go | 8 +- mercure.go | 20 ++ options.go | 4 + phpmainthread_test.go | 10 +- scaling_test.go | 16 +- watcher-skip.go | 23 ++ watcher.go | 47 ++++ watcher_test.go | 16 +- worker.go | 62 ++--- workerextension_test.go | 2 + 40 files changed, 1247 insertions(+), 671 deletions(-) create mode 100644 caddy/hotreload-skip.go create mode 100644 caddy/hotreload.go create mode 100644 caddy/hotreload_test.go create mode 100644 hotreload.go delete mode 100644 internal/watcher/cgo.go create mode 100644 internal/watcher/pattern.go create mode 100644 internal/watcher/pattern_test.go delete mode 100644 internal/watcher/watch_pattern.go delete mode 100644 internal/watcher/watch_pattern_test.go delete mode 100644 internal/watcher/watcher-skip.go delete mode 100644 internal/watcher/watcher.c delete mode 100644 internal/watcher/watcher.h create mode 100644 watcher-skip.go create mode 100644 watcher.go diff --git 
a/caddy/admin_test.go b/caddy/admin_test.go index 17f211cd..ad0b5a8e 100644 --- a/caddy/admin_test.go +++ b/caddy/admin_test.go @@ -4,12 +4,13 @@ import ( "bytes" "encoding/json" "fmt" - "github.com/dunglas/frankenphp/internal/fastabs" "io" "net/http" "sync" "testing" + "github.com/dunglas/frankenphp/internal/fastabs" + "github.com/caddyserver/caddy/v2/caddytest" "github.com/dunglas/frankenphp" "github.com/stretchr/testify/assert" diff --git a/caddy/app.go b/caddy/app.go index e9c31c9f..22d7e04c 100644 --- a/caddy/app.go +++ b/caddy/app.go @@ -49,13 +49,14 @@ type FrankenPHPApp struct { NumThreads int `json:"num_threads,omitempty"` // MaxThreads limits how many threads can be started at runtime. Default 2x NumThreads MaxThreads int `json:"max_threads,omitempty"` - // Workers configures the worker scripts to start. + // Workers configures the worker scripts to start Workers []workerConfig `json:"workers,omitempty"` // Overwrites the default php ini configuration PhpIni map[string]string `json:"php_ini,omitempty"` // The maximum amount of time a request may be stalled waiting for a thread MaxWaitTime time.Duration `json:"max_wait_time,omitempty"` + opts []frankenphp.Option metrics frankenphp.Metrics ctx context.Context logger *slog.Logger @@ -76,6 +77,9 @@ func (f *FrankenPHPApp) Provision(ctx caddy.Context) error { f.ctx = ctx f.logger = ctx.Slogger() + // We have at least 7 hardcoded options + f.opts = make([]frankenphp.Option, 0, 7+len(options)) + if httpApp, err := ctx.AppIfConfigured("http"); err == nil { if httpApp.(*caddyhttp.App).Metrics != nil { f.metrics = frankenphp.NewPrometheusMetrics(ctx.GetMetricsRegistry()) @@ -135,11 +139,10 @@ func (f *FrankenPHPApp) Start() error { repl := caddy.NewReplacer() optionsMU.RLock() - opts := make([]frankenphp.Option, 0, len(options)+len(f.Workers)+7) - opts = append(opts, options...) + f.opts = append(f.opts, options...) 
optionsMU.RUnlock() - opts = append(opts, + f.opts = append(f.opts, frankenphp.WithContext(f.ctx), frankenphp.WithLogger(f.logger), frankenphp.WithNumThreads(f.NumThreads), @@ -150,31 +153,19 @@ func (f *FrankenPHPApp) Start() error { ) for _, w := range f.Workers { - workerOpts := make([]frankenphp.WorkerOption, 0, len(w.requestOptions)+4) + w.options = append(w.options, + frankenphp.WithWorkerEnv(w.Env), + frankenphp.WithWorkerWatchMode(w.Watch), + frankenphp.WithWorkerMaxFailures(w.MaxConsecutiveFailures), + frankenphp.WithWorkerMaxThreads(w.MaxThreads), + frankenphp.WithWorkerRequestOptions(w.requestOptions...), + ) - if w.requestOptions == nil { - workerOpts = append(workerOpts, - frankenphp.WithWorkerEnv(w.Env), - frankenphp.WithWorkerWatchMode(w.Watch), - frankenphp.WithWorkerMaxFailures(w.MaxConsecutiveFailures), - frankenphp.WithWorkerMaxThreads(w.MaxThreads), - ) - } else { - workerOpts = append( - workerOpts, - frankenphp.WithWorkerEnv(w.Env), - frankenphp.WithWorkerWatchMode(w.Watch), - frankenphp.WithWorkerMaxFailures(w.MaxConsecutiveFailures), - frankenphp.WithWorkerMaxThreads(w.MaxThreads), - frankenphp.WithWorkerRequestOptions(w.requestOptions...), - ) - } - - opts = append(opts, frankenphp.WithWorkers(w.Name, repl.ReplaceKnown(w.FileName, ""), w.Num, workerOpts...)) + f.opts = append(f.opts, frankenphp.WithWorkers(w.Name, repl.ReplaceKnown(w.FileName, ""), w.Num, w.options...)) } frankenphp.Shutdown() - if err := frankenphp.Init(opts...); err != nil { + if err := frankenphp.Init(f.opts...); err != nil { return err } @@ -288,7 +279,7 @@ func (f *FrankenPHPApp) UnmarshalCaddyfile(d *caddyfile.Dispenser) error { } case "worker": - wc, err := parseWorkerConfig(d) + wc, err := unmarshalWorker(d) if err != nil { return err } diff --git a/caddy/caddy.go b/caddy/caddy.go index 9eb6162d..0cc330f9 100644 --- a/caddy/caddy.go +++ b/caddy/caddy.go @@ -12,7 +12,7 @@ import ( const ( defaultDocumentRoot = "public" - defaultWatchPattern = 
"./**/*.{php,yaml,yml,twig,env}" + defaultWatchPattern = "./**/*.{env,php,twig,yaml,yml}" ) func init() { diff --git a/caddy/caddy_test.go b/caddy/caddy_test.go index 7cf69bfa..5f95c0ab 100644 --- a/caddy/caddy_test.go +++ b/caddy/caddy_test.go @@ -1362,7 +1362,7 @@ func TestWorkerMatchDirective(t *testing.T) { } `, "caddyfile") - // worker is outside of public directory, match anyways + // worker is outside public directory, match anyway tester.AssertGetResponse("http://localhost:"+testPort+"/matched-path", http.StatusOK, "requests:1") tester.AssertGetResponse("http://localhost:"+testPort+"/matched-path/anywhere", http.StatusOK, "requests:2") diff --git a/caddy/config_test.go b/caddy/config_test.go index 29fee1d2..a26d065a 100644 --- a/caddy/config_test.go +++ b/caddy/config_test.go @@ -181,7 +181,7 @@ func TestModuleWorkerWithWatchConfiguration(t *testing.T) { // Verify that the watch directories were set correctly require.Len(t, module.Workers[0].Watch, 3, "Expected three watch patterns") - require.Equal(t, "./**/*.{php,yaml,yml,twig,env}", module.Workers[0].Watch[0], "First watch pattern should be the default") + require.Equal(t, defaultWatchPattern, module.Workers[0].Watch[0], "First watch pattern should be the default") require.Equal(t, "./src/**/*.php", module.Workers[0].Watch[1], "Second watch pattern should match the configuration") require.Equal(t, "./config/**/*.yaml", module.Workers[0].Watch[2], "Third watch pattern should match the configuration") } diff --git a/caddy/go.mod b/caddy/go.mod index bf10425d..82c615b7 100644 --- a/caddy/go.mod +++ b/caddy/go.mod @@ -1,6 +1,6 @@ module github.com/dunglas/frankenphp/caddy -go 1.25.0 +go 1.25.4 replace github.com/dunglas/frankenphp => ../ @@ -11,6 +11,7 @@ require ( github.com/caddyserver/certmagic v0.25.0 github.com/dunglas/caddy-cbrotli v1.0.1 github.com/dunglas/frankenphp v1.10.1 + github.com/dunglas/mercure v0.21.2 github.com/dunglas/mercure/caddy v0.21.2 github.com/dunglas/vulcain/caddy v1.2.1 
github.com/prometheus/client_golang v1.23.2 @@ -59,10 +60,10 @@ require ( github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da // indirect github.com/dlclark/regexp2 v1.11.5 // indirect github.com/dunglas/httpsfv v1.1.0 // indirect - github.com/dunglas/mercure v0.21.2 // indirect github.com/dunglas/skipfilter v1.0.0 // indirect github.com/dunglas/vulcain v1.2.1 // indirect github.com/dustin/go-humanize v1.0.1 // indirect + github.com/e-dant/watcher/watcher-go v0.0.0-20251208164151-f88ec3b7e146 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.9.0 // indirect github.com/fxamacker/cbor/v2 v2.9.0 // indirect diff --git a/caddy/go.sum b/caddy/go.sum index 72b072b2..f73527e7 100644 --- a/caddy/go.sum +++ b/caddy/go.sum @@ -159,6 +159,8 @@ github.com/dunglas/vulcain/caddy v1.2.1/go.mod h1:8QrmLTfURmW2VgjTR6Gb9a53FrZjsp github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/e-dant/watcher/watcher-go v0.0.0-20251208164151-f88ec3b7e146 h1:h3vVM6X45PK0mAk8NqiYNQGXTyhvXy1HQ5GhuQN4eeA= +github.com/e-dant/watcher/watcher-go v0.0.0-20251208164151-f88ec3b7e146/go.mod h1:sVUOkwtftoj71nnJRG2S0oWNfXFdKpz/M9vK0z06nmM= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= diff --git a/caddy/hotreload-skip.go b/caddy/hotreload-skip.go new file mode 100644 index 00000000..9915638f --- /dev/null +++ b/caddy/hotreload-skip.go @@ -0,0 +1,20 @@ +//go:build nowatcher || nomercure + +package caddy + +import ( + "errors" + + "github.com/caddyserver/caddy/v2/modules/caddyhttp" +) + +type hotReloadContext struct { +} + 
+func (_ *FrankenPHPModule) configureHotReload(_ *FrankenPHPApp) error { + return nil +} + +func (_ *FrankenPHPModule) unmarshalHotReload(d *caddyfile.Dispenser) error { + return errors.New("hot reload support disabled") +} diff --git a/caddy/hotreload.go b/caddy/hotreload.go new file mode 100644 index 00000000..8a844bf9 --- /dev/null +++ b/caddy/hotreload.go @@ -0,0 +1,111 @@ +//go:build !nowatcher && !nomercure + +package caddy + +import ( + "bytes" + "encoding/gob" + "errors" + "fmt" + "hash/fnv" + "net/url" + + "github.com/caddyserver/caddy/v2/caddyconfig/caddyfile" + "github.com/dunglas/frankenphp" +) + +const defaultHotReloadPattern = "./**/*.{css,env,gif,htm,html,jpg,jpeg,js,mjs,php,png,svg,twig,webp,xml,yaml,yml}" + +type hotReloadContext struct { + // HotReload specifies files to watch for file changes to trigger hot reloads updates. Supports the glob syntax. + HotReload *hotReloadConfig `json:"hot_reload,omitempty"` +} + +type hotReloadConfig struct { + Topic string `json:"topic"` + Watch []string `json:"watch"` +} + +func (f *FrankenPHPModule) configureHotReload(app *FrankenPHPApp) error { + if f.HotReload == nil { + return nil + } + + if f.mercureHub == nil { + return errors.New("unable to enable hot reloading: no Mercure hub configured") + } + + if len(f.HotReload.Watch) == 0 { + f.HotReload.Watch = []string{defaultHotReloadPattern} + } + + if f.HotReload.Topic == "" { + uid, err := uniqueID(f) + if err != nil { + return err + } + + f.HotReload.Topic = "https://frankenphp.dev/hot-reload/" + uid + } + + app.opts = append(app.opts, frankenphp.WithHotReload(f.HotReload.Topic, f.mercureHub, f.HotReload.Watch)) + f.preparedEnv["FRANKENPHP_HOT_RELOAD\x00"] = "/.well-known/mercure?topic=" + url.QueryEscape(f.HotReload.Topic) + + return nil +} + +func (f *FrankenPHPModule) unmarshalHotReload(d *caddyfile.Dispenser) error { + patterns := d.RemainingArgs() + if len(patterns) > 0 { + f.HotReload = &hotReloadConfig{ + Watch: patterns, + } + } + + for 
d.NextBlock(1) { + switch v := d.Val(); v { + case "topic": + if !d.NextArg() { + return d.ArgErr() + } + + if f.HotReload == nil { + f.HotReload = &hotReloadConfig{} + } + + f.HotReload.Topic = d.Val() + + case "watch": + patterns := d.RemainingArgs() + if len(patterns) == 0 { + return d.ArgErr() + } + + if f.HotReload == nil { + f.HotReload = &hotReloadConfig{} + } + + f.HotReload.Watch = append(f.HotReload.Watch, patterns...) + + default: + return wrongSubDirectiveError("hot_reload", "topic, watch", v) + } + } + + return nil +} + +func uniqueID(s any) (string, error) { + var b bytes.Buffer + + if err := gob.NewEncoder(&b).Encode(s); err != nil { + return "", fmt.Errorf("unable to generate unique name: %w", err) + } + + h := fnv.New64a() + if _, err := h.Write(b.Bytes()); err != nil { + return "", fmt.Errorf("unable to generate unique name: %w", err) + } + + return fmt.Sprintf("%016x", h.Sum64()), nil +} diff --git a/caddy/hotreload_test.go b/caddy/hotreload_test.go new file mode 100644 index 00000000..59ae4031 --- /dev/null +++ b/caddy/hotreload_test.go @@ -0,0 +1,88 @@ +//go:build !nowatcher && !nomercure + +package caddy_test + +import ( + "context" + "net/http" + "net/url" + "os" + "path/filepath" + "strings" + "sync" + "testing" + + "github.com/caddyserver/caddy/v2/caddytest" + "github.com/stretchr/testify/require" +) + +func TestHotReload(t *testing.T) { + const topic = "https://frankenphp.dev/hot-reload/test" + + u := "/.well-known/mercure?topic=" + url.QueryEscape(topic) + + tmpDir := t.TempDir() + indexFile := filepath.Join(tmpDir, "index.php") + + tester := caddytest.NewTester(t) + tester.InitServer(` + { + debug + skip_install_trust + admin localhost:2999 + } + + http://localhost:`+testPort+` { + mercure { + transport local + subscriber_jwt TestKey + anonymous + } + + php_server { + root `+tmpDir+` + hot_reload { + topic `+topic+` + watch `+tmpDir+`/*.php + } + } + `, "caddyfile") + + var connected, received sync.WaitGroup + + connected.Add(1) + 
received.Go(func() { + cx, cancel := context.WithCancel(t.Context()) + req, _ := http.NewRequest(http.MethodGet, "http://localhost:"+testPort+u, nil) + req = req.WithContext(cx) + resp := tester.AssertResponseCode(req, http.StatusOK) + + connected.Done() + + var receivedBody strings.Builder + + buf := make([]byte, 1024) + for { + _, err := resp.Body.Read(buf) + require.NoError(t, err) + + receivedBody.Write(buf) + + if strings.Contains(receivedBody.String(), "index.php") { + cancel() + + break + } + } + + require.NoError(t, resp.Body.Close()) + }) + + connected.Wait() + + require.NoError(t, os.WriteFile(indexFile, []byte(" len(partsToMatch) { + return false + } + + cursor = j + subPattern := strings.Join(partsToMatch[j:j+patternSize], string(filepath.Separator)) + + if matchBracketPattern(pattern, subPattern) { + cursor = j + patternSize - 1 + + break + } + + if cursor > len(partsToMatch)-patternSize-1 { + return false + } + } + } + + return true +} + +// we also check for the following bracket syntax: /path/*.{php,twig,yaml} +func matchBracketPattern(pattern string, fileName string) bool { + openingBracket := strings.Index(pattern, "{") + closingBracket := strings.Index(pattern, "}") + + // if there are no brackets we can match regularly + if openingBracket == -1 || closingBracket == -1 { + return matchPattern(pattern, fileName) + } + + beforeTheBrackets := pattern[:openingBracket] + betweenTheBrackets := pattern[openingBracket+1 : closingBracket] + afterTheBrackets := pattern[closingBracket+1:] + + // all bracket entries are checked individually, only one needs to match + // *.{php,twig,yaml} -> *.php, *.twig, *.yaml + for pattern := range strings.SplitSeq(betweenTheBrackets, ",") { + if matchPattern(beforeTheBrackets+pattern+afterTheBrackets, fileName) { + return true + } + } + + return false +} + +func matchPattern(pattern string, fileName string) bool { + if pattern == "" { + return true + } + + patternMatches, err := filepath.Match(pattern, fileName) + + if 
err != nil { + if globalLogger.Enabled(globalCtx, slog.LevelError) { + globalLogger.LogAttrs(globalCtx, slog.LevelError, "failed to match filename", slog.String("file", fileName), slog.Any("error", err)) + } + + return false + } + + return patternMatches +} diff --git a/internal/watcher/pattern_test.go b/internal/watcher/pattern_test.go new file mode 100644 index 00000000..eddc3b33 --- /dev/null +++ b/internal/watcher/pattern_test.go @@ -0,0 +1,350 @@ +//go:build !nowatcher + +package watcher + +import ( + "path/filepath" + "testing" + + "github.com/e-dant/watcher/watcher-go" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDisallowOnEventTypeBiggerThan3(t *testing.T) { + w := pattern{value: "/some/path"} + require.NoError(t, w.parse()) + + assert.False(t, w.allowReload(&watcher.Event{PathName: "/some/path/watch-me.php", EffectType: watcher.EffectTypeOwner})) +} + +func TestDisallowOnPathTypeBiggerThan2(t *testing.T) { + w := pattern{value: "/some/path"} + require.NoError(t, w.parse()) + + assert.False(t, w.allowReload(&watcher.Event{PathName: "/some/path/watch-me.php", PathType: watcher.PathTypeSymLink})) +} + +func TestWatchesCorrectDir(t *testing.T) { + t.Parallel() + + data := []struct { + pattern string + dir string + }{ + {"/path", "/path"}, + {"/path/", "/path"}, + {"/path/**/*.php", "/path"}, + {"/path/*.php", "/path"}, + {"/path/*/*.php", "/path"}, + {"/path/?path/*.php", "/path"}, + {"/path/{dir1,dir2}/**/*.php", "/path"}, + {".", relativeDir(t, "")}, + {"./", relativeDir(t, "")}, + {"./**", relativeDir(t, "")}, + {"..", relativeDir(t, "/..")}, + } + + for _, d := range data { + t.Run(d.pattern, func(t *testing.T) { + t.Parallel() + + hasDir(t, d.pattern, d.dir) + }) + } +} + +func TestValidRecursiveDirectories(t *testing.T) { + t.Parallel() + + data := []struct { + pattern string + dir string + }{ + {"/path", "/path/file.php"}, + {"/path", "/path/subpath/file.php"}, + {"/path/", "/path/subpath/file.php"}, + 
{"/path**", "/path/subpath/file.php"}, + {"/path/**", "/path/subpath/file.php"}, + {"/path/**/", "/path/subpath/file.php"}, + {".", relativeDir(t, "file.php")}, + {".", relativeDir(t, "subpath/file.php")}, + {"./**", relativeDir(t, "subpath/file.php")}, + {"..", relativeDir(t, "subpath/file.php")}, + } + + for _, d := range data { + t.Run(d.pattern, func(t *testing.T) { + t.Parallel() + + shouldMatch(t, d.pattern, d.dir) + }) + } +} + +func TestInvalidRecursiveDirectories(t *testing.T) { + t.Parallel() + + data := []struct { + pattern string + dir string + }{ + {"/path", "/other/file.php"}, + {"/path/**", "/other/file.php"}, + {".", "/other/file.php"}, + } + + for _, d := range data { + t.Run(d.pattern, func(t *testing.T) { + t.Parallel() + + shouldNotMatch(t, d.pattern, d.dir) + }) + } +} + +func TestValidNonRecursiveFilePatterns(t *testing.T) { + t.Parallel() + + data := []struct { + pattern string + dir string + }{ + {"/*.php", "/file.php"}, + {"/path/*.php", "/path/file.php"}, + {"/path/?ile.php", "/path/file.php"}, + {"/path/file.php", "/path/file.php"}, + {"*.php", relativeDir(t, "file.php")}, + {"./*.php", relativeDir(t, "file.php")}, + } + + for _, d := range data { + t.Run(d.pattern, func(t *testing.T) { + t.Parallel() + + shouldMatch(t, d.pattern, d.dir) + }) + } +} + +func TestInValidNonRecursiveFilePatterns(t *testing.T) { + t.Parallel() + + data := []struct { + pattern string + dir string + }{ + {"/path/*.txt", "/path/file.php"}, + {"/path/*.php", "/path/subpath/file.php"}, + {"/*.php", "/path/file.php"}, + {"*.txt", relativeDir(t, "file.php")}, + {"*.php", relativeDir(t, "subpath/file.php")}, + } + + for _, d := range data { + t.Run(d.pattern, func(t *testing.T) { + t.Parallel() + + shouldNotMatch(t, d.pattern, d.dir) + }) + } +} + +func TestValidRecursiveFilePatterns(t *testing.T) { + t.Parallel() + + data := []struct { + pattern string + dir string + }{ + {"/path/**/*.php", "/path/file.php"}, + {"/path/**/*.php", "/path/subpath/file.php"}, + 
{"/path/**/?ile.php", "/path/subpath/file.php"}, + {"/path/**/file.php", "/path/subpath/file.php"}, + {"**/*.php", relativeDir(t, "file.php")}, + {"**/*.php", relativeDir(t, "subpath/file.php")}, + {"./**/*.php", relativeDir(t, "subpath/file.php")}, + } + + for _, d := range data { + t.Run(d.pattern, func(t *testing.T) { + t.Parallel() + + shouldMatch(t, d.pattern, d.dir) + }) + } +} + +func TestInvalidRecursiveFilePatterns(t *testing.T) { + t.Parallel() + + data := []struct { + pattern string + dir string + }{ + {"/path/**/*.txt", "/path/file.php"}, + {"/path/**/*.txt", "/other/file.php"}, + {"/path/**/*.txt", "/path/subpath/file.php"}, + {"/path/**/?ilm.php", "/path/subpath/file.php"}, + {"**/*.php", "/other/file.php"}, + {".**/*.php", "/other/file.php"}, + {"./**/*.php", "/other/file.php"}, + {"/a/**/very/long/path.php", "/a/short.php"}, + {"", ""}, + {"/a/**/b/c/d/**/e.php", "/a/x/e.php"}, + } + + for _, d := range data { + t.Run(d.pattern, func(t *testing.T) { + t.Parallel() + + shouldNotMatch(t, d.pattern, d.dir) + }) + } +} + +func TestValidDirectoryPatterns(t *testing.T) { + t.Parallel() + + data := []struct { + pattern string + dir string + }{ + {"/path/*/*.php", "/path/subpath/file.php"}, + {"/path/*/*/*.php", "/path/subpath/subpath/file.php"}, + {"/path/?/*.php", "/path/1/file.php"}, + {"/path/**/vendor/*.php", "/path/vendor/file.php"}, + {"/path/**/vendor/*.php", "/path/subpath/vendor/file.php"}, + {"/path/**/vendor/**/*.php", "/path/vendor/file.php"}, + {"/path/**/vendor/**/*.php", "/path/subpath/subpath/vendor/subpath/subpath/file.php"}, + {"/path/**/vendor/*/*.php", "/path/subpath/subpath/vendor/subpath/file.php"}, + {"/path*/path*/*", "/path1/path2/file.php"}, + } + + for _, d := range data { + t.Run(d.pattern, func(t *testing.T) { + t.Parallel() + + shouldMatch(t, d.pattern, d.dir) + }) + } +} + +func TestInvalidDirectoryPatterns(t *testing.T) { + t.Parallel() + data := []struct { + pattern string + dir string + }{ + {"/path/subpath/*.php", 
"/path/other/file.php"}, + {"/path/*/*.php", "/path/subpath/subpath/file.php"}, + {"/path/?/*.php", "/path/subpath/file.php"}, + {"/path/*/*/*.php", "/path/subpath/file.php"}, + {"/path/*/*/*.php", "/path/subpath/subpath/subpath/file.php"}, + {"/path/**/vendor/*.php", "/path/subpath/vendor/subpath/file.php"}, + {"/path/**/vendor/*.php", "/path/subpath/file.php"}, + {"/path/**/vendor/**/*.php", "/path/subpath/file.php"}, + {"/path/**/vendor/**/*.txt", "/path/subpath/vendor/subpath/file.php"}, + {"/path/**/vendor/**/*.php", "/path/subpath/subpath/subpath/file.php"}, + {"/path/**/vendor/*/*.php", "/path/subpath/vendor/subpath/subpath/file.php"}, + {"/path*/path*", "/path1/path1/file.php"}, + } + + for _, d := range data { + t.Run(d.pattern, func(t *testing.T) { + t.Parallel() + + shouldNotMatch(t, d.pattern, d.dir) + }) + } +} + +func TestValidExtendedPatterns(t *testing.T) { + data := []struct { + pattern string + dir string + }{ + {"/path/*.{php}", "/path/file.php"}, + {"/path/*.{php,twig}", "/path/file.php"}, + {"/path/*.{php,twig}", "/path/file.twig"}, + {"/path/**/{file.php,file.twig}", "/path/subpath/file.twig"}, + {"/path/{dir1,dir2}/file.php", "/path/dir1/file.php"}, + {"/path/{dir1,dir2}/file.php", "/path/dir2/file.php"}, + {"/app/{app,config,resources}/**/*.php", "/app/app/subpath/file.php"}, + {"/app/{app,config,resources}/**/*.php", "/app/config/subpath/file.php"}, + } + + for _, d := range data { + t.Run(d.pattern, func(t *testing.T) { + t.Parallel() + + shouldMatch(t, d.pattern, d.dir) + }) + } +} + +func TestInvalidExtendedPatterns(t *testing.T) { + data := []struct { + pattern string + dir string + }{ + {"/path/*.{php}", "/path/file.txt"}, + {"/path/*.{php,twig}", "/path/file.txt"}, + {"/path/{file.php,file.twig}", "/path/file.txt"}, + {"/path/{dir1,dir2}/file.php", "/path/dir3/file.php"}, + {"/path/{dir1,dir2}/**/*.php", "/path/dir1/subpath/file.txt"}, + } + + for _, d := range data { + t.Run(d.pattern, func(t *testing.T) { + t.Parallel() + + 
shouldNotMatch(t, d.pattern, d.dir) + }) + } + +} + +func TestAnAssociatedEventTriggersTheWatcher(t *testing.T) { + w := pattern{value: "/**/*.php"} + require.NoError(t, w.parse()) + w.events = make(chan eventHolder) + + e := &watcher.Event{PathName: "/path/temporary_file", AssociatedPathName: "/path/file.php"} + go w.handle(e) + + assert.Equal(t, e, (<-w.events).event) +} + +func relativeDir(t *testing.T, relativePath string) string { + dir, err := filepath.Abs("./" + relativePath) + assert.NoError(t, err) + return dir +} + +func hasDir(t *testing.T, p string, dir string) { + t.Helper() + + w := pattern{value: p} + require.NoError(t, w.parse()) + + assert.Equal(t, dir, w.value) +} + +func shouldMatch(t *testing.T, p string, fileName string) { + t.Helper() + + w := pattern{value: p} + require.NoError(t, w.parse()) + + assert.True(t, w.allowReload(&watcher.Event{PathName: fileName})) +} + +func shouldNotMatch(t *testing.T, p string, fileName string) { + t.Helper() + + w := pattern{value: p} + require.NoError(t, w.parse()) + + assert.False(t, w.allowReload(&watcher.Event{PathName: fileName})) +} diff --git a/internal/watcher/watch_pattern.go b/internal/watcher/watch_pattern.go deleted file mode 100644 index 37b2fdde..00000000 --- a/internal/watcher/watch_pattern.go +++ /dev/null @@ -1,178 +0,0 @@ -//go:build !nowatcher - -package watcher - -import ( - "log/slog" - "path/filepath" - "strings" - - "github.com/dunglas/frankenphp/internal/fastabs" -) - -type watchPattern struct { - dir string - patterns []string - trigger chan string - failureCount int -} - -func parseFilePatterns(filePatterns []string) ([]*watchPattern, error) { - watchPatterns := make([]*watchPattern, 0, len(filePatterns)) - for _, filePattern := range filePatterns { - watchPattern, err := parseFilePattern(filePattern) - if err != nil { - return nil, err - } - watchPatterns = append(watchPatterns, watchPattern) - } - return watchPatterns, nil -} - -// this method prepares the watchPattern struct for a 
single file pattern (aka /path/*pattern) -func parseFilePattern(filePattern string) (*watchPattern, error) { - w := &watchPattern{} - - // first we clean the pattern - absPattern, err := fastabs.FastAbs(filePattern) - if err != nil { - return nil, err - } - w.dir = absPattern - - // then we split the pattern to determine where the directory ends and the pattern starts - splitPattern := strings.Split(absPattern, string(filepath.Separator)) - patternWithoutDir := "" - for i, part := range splitPattern { - isFilename := i == len(splitPattern)-1 && strings.Contains(part, ".") - isGlobCharacter := strings.ContainsAny(part, "[*?{") - if isFilename || isGlobCharacter { - patternWithoutDir = filepath.Join(splitPattern[i:]...) - w.dir = filepath.Join(splitPattern[:i]...) - break - } - } - - // now we split the pattern according to the recursive '**' syntax - w.patterns = strings.Split(patternWithoutDir, "**") - for i, pattern := range w.patterns { - w.patterns[i] = strings.Trim(pattern, string(filepath.Separator)) - } - - // finally, we remove the trailing separator and add leading separator - w.dir = string(filepath.Separator) + strings.Trim(w.dir, string(filepath.Separator)) - - return w, nil -} - -func (watchPattern *watchPattern) allowReload(fileName string, eventType int, pathType int) bool { - if !isValidEventType(eventType) || !isValidPathType(pathType, fileName) { - return false - } - - return isValidPattern(fileName, watchPattern.dir, watchPattern.patterns) -} - -// 0:rename,1:modify,2:create,3:destroy,4:owner,5:other, -func isValidEventType(eventType int) bool { - return eventType <= 3 -} - -// 0:dir,1:file,2:hard_link,3:sym_link,4:watcher,5:other, -func isValidPathType(pathType int, fileName string) bool { - if pathType == 4 && logger.Enabled(ctx, slog.LevelDebug) { - logger.LogAttrs(ctx, slog.LevelDebug, "special edant/watcher event", slog.String("fileName", fileName)) - } - - return pathType <= 2 -} - -func isValidPattern(fileName string, dir string, patterns 
[]string) bool { - // first we remove the dir from the pattern - if !strings.HasPrefix(fileName, dir) { - return false - } - - // remove the dir and separator from the filename - fileNameWithoutDir := strings.TrimPrefix(strings.TrimPrefix(fileName, dir), string(filepath.Separator)) - - // if the pattern has size 1 we can match it directly against the filename - if len(patterns) == 1 { - return matchBracketPattern(patterns[0], fileNameWithoutDir) - } - - return matchPatterns(patterns, fileNameWithoutDir) -} - -func matchPatterns(patterns []string, fileName string) bool { - partsToMatch := strings.Split(fileName, string(filepath.Separator)) - cursor := 0 - - // if there are multiple patterns due to '**' we need to match them individually - for i, pattern := range patterns { - patternSize := strings.Count(pattern, string(filepath.Separator)) + 1 - - // if we are at the last pattern we will start matching from the end of the filename - if i == len(patterns)-1 { - cursor = len(partsToMatch) - patternSize - } - - // the cursor will move through the fileName until the pattern matches - for j := cursor; j < len(partsToMatch); j++ { - cursor = j - subPattern := strings.Join(partsToMatch[j:j+patternSize], string(filepath.Separator)) - if matchBracketPattern(pattern, subPattern) { - cursor = j + patternSize - 1 - break - } - if cursor > len(partsToMatch)-patternSize-1 { - return false - } - } - } - - return true -} - -// we also check for the following bracket syntax: /path/*.{php,twig,yaml} -func matchBracketPattern(pattern string, fileName string) bool { - openingBracket := strings.Index(pattern, "{") - closingBracket := strings.Index(pattern, "}") - - // if there are no brackets we can match regularly - if openingBracket == -1 || closingBracket == -1 { - return matchPattern(pattern, fileName) - } - - beforeTheBrackets := pattern[:openingBracket] - betweenTheBrackets := pattern[openingBracket+1 : closingBracket] - afterTheBrackets := pattern[closingBracket+1:] - - // all 
bracket entries are checked individually, only one needs to match - // *.{php,twig,yaml} -> *.php, *.twig, *.yaml - for pattern := range strings.SplitSeq(betweenTheBrackets, ",") { - if matchPattern(beforeTheBrackets+pattern+afterTheBrackets, fileName) { - return true - } - } - - return false -} - -func matchPattern(pattern string, fileName string) bool { - if pattern == "" { - return true - } - - patternMatches, err := filepath.Match(pattern, fileName) - - if err != nil { - if logger.Enabled(ctx, slog.LevelError) { - logger.LogAttrs(ctx, slog.LevelError, "failed to match filename", slog.String("file", fileName), slog.Any("error", err)) - } - - return false - } - - return patternMatches -} diff --git a/internal/watcher/watch_pattern_test.go b/internal/watcher/watch_pattern_test.go deleted file mode 100644 index 748cf4b0..00000000 --- a/internal/watcher/watch_pattern_test.go +++ /dev/null @@ -1,187 +0,0 @@ -//go:build !nowatcher - -package watcher - -import ( - "path/filepath" - "testing" - "time" - - "github.com/stretchr/testify/assert" -) - -func TestDisallowOnEventTypeBiggerThan3(t *testing.T) { - const fileName = "/some/path/watch-me.php" - const eventType = 4 - - watchPattern, err := parseFilePattern("/some/path") - - assert.NoError(t, err) - assert.False(t, watchPattern.allowReload(fileName, eventType, 0)) -} - -func TestDisallowOnPathTypeBiggerThan2(t *testing.T) { - const fileName = "/some/path/watch-me.php" - const pathType = 3 - - watchPattern, err := parseFilePattern("/some/path") - - assert.NoError(t, err) - assert.False(t, watchPattern.allowReload(fileName, 0, pathType)) -} - -func TestWatchesCorrectDir(t *testing.T) { - hasDir(t, "/path", "/path") - hasDir(t, "/path/", "/path") - hasDir(t, "/path/**/*.php", "/path") - hasDir(t, "/path/*.php", "/path") - hasDir(t, "/path/*/*.php", "/path") - hasDir(t, "/path/?dir/*.php", "/path") - hasDir(t, "/path/{dir1,dir2}/**/*.php", "/path") - hasDir(t, ".", relativeDir(t, "")) - hasDir(t, "./", relativeDir(t, "")) 
- hasDir(t, "./**", relativeDir(t, "")) - hasDir(t, "..", relativeDir(t, "/..")) -} - -func TestValidRecursiveDirectories(t *testing.T) { - shouldMatch(t, "/path", "/path/file.php") - shouldMatch(t, "/path", "/path/subpath/file.php") - shouldMatch(t, "/path/", "/path/subpath/file.php") - shouldMatch(t, "/path**", "/path/subpath/file.php") - shouldMatch(t, "/path/**", "/path/subpath/file.php") - shouldMatch(t, "/path/**/", "/path/subpath/file.php") - shouldMatch(t, ".", relativeDir(t, "file.php")) - shouldMatch(t, ".", relativeDir(t, "subpath/file.php")) - shouldMatch(t, "./**", relativeDir(t, "subpath/file.php")) - shouldMatch(t, "..", relativeDir(t, "subpath/file.php")) -} - -func TestInvalidRecursiveDirectories(t *testing.T) { - shouldNotMatch(t, "/path", "/other/file.php") - shouldNotMatch(t, "/path/**", "/other/file.php") - shouldNotMatch(t, ".", "/other/file.php") -} - -func TestValidNonRecursiveFilePatterns(t *testing.T) { - shouldMatch(t, "/*.php", "/file.php") - shouldMatch(t, "/path/*.php", "/path/file.php") - shouldMatch(t, "/path/?ile.php", "/path/file.php") - shouldMatch(t, "/path/file.php", "/path/file.php") - shouldMatch(t, "*.php", relativeDir(t, "file.php")) - shouldMatch(t, "./*.php", relativeDir(t, "file.php")) -} - -func TestInValidNonRecursiveFilePatterns(t *testing.T) { - shouldNotMatch(t, "/path/*.txt", "/path/file.php") - shouldNotMatch(t, "/path/*.php", "/path/subpath/file.php") - shouldNotMatch(t, "/*.php", "/path/file.php") - shouldNotMatch(t, "*.txt", relativeDir(t, "file.php")) - shouldNotMatch(t, "*.php", relativeDir(t, "subpath/file.php")) -} - -func TestValidRecursiveFilePatterns(t *testing.T) { - shouldMatch(t, "/path/**/*.php", "/path/file.php") - shouldMatch(t, "/path/**/*.php", "/path/subpath/file.php") - shouldMatch(t, "/path/**/?ile.php", "/path/subpath/file.php") - shouldMatch(t, "/path/**/file.php", "/path/subpath/file.php") - shouldMatch(t, "**/*.php", relativeDir(t, "file.php")) - shouldMatch(t, "**/*.php", relativeDir(t, 
"subpath/file.php")) - shouldMatch(t, "./**/*.php", relativeDir(t, "subpath/file.php")) -} - -func TestInvalidRecursiveFilePatterns(t *testing.T) { - shouldNotMatch(t, "/path/**/*.txt", "/path/file.php") - shouldNotMatch(t, "/path/**/*.txt", "/other/file.php") - shouldNotMatch(t, "/path/**/*.txt", "/path/subpath/file.php") - shouldNotMatch(t, "/path/**/?ilm.php", "/path/subpath/file.php") - shouldNotMatch(t, "**/*.php", "/other/file.php") - shouldNotMatch(t, ".**/*.php", "/other/file.php") - shouldNotMatch(t, "./**/*.php", "/other/file.php") -} - -func TestValidDirectoryPatterns(t *testing.T) { - shouldMatch(t, "/path/*/*.php", "/path/subpath/file.php") - shouldMatch(t, "/path/*/*/*.php", "/path/subpath/subpath/file.php") - shouldMatch(t, "/path/?/*.php", "/path/1/file.php") - shouldMatch(t, "/path/**/vendor/*.php", "/path/vendor/file.php") - shouldMatch(t, "/path/**/vendor/*.php", "/path/subpath/vendor/file.php") - shouldMatch(t, "/path/**/vendor/**/*.php", "/path/vendor/file.php") - shouldMatch(t, "/path/**/vendor/**/*.php", "/path/subpath/subpath/vendor/subpath/subpath/file.php") - shouldMatch(t, "/path/**/vendor/*/*.php", "/path/subpath/subpath/vendor/subpath/file.php") - shouldMatch(t, "/path*/path*/*", "/path1/path2/file.php") -} - -func TestInvalidDirectoryPatterns(t *testing.T) { - shouldNotMatch(t, "/path/subpath/*.php", "/path/other/file.php") - shouldNotMatch(t, "/path/*/*.php", "/path/subpath/subpath/file.php") - shouldNotMatch(t, "/path/?/*.php", "/path/subpath/file.php") - shouldNotMatch(t, "/path/*/*/*.php", "/path/subpath/file.php") - shouldNotMatch(t, "/path/*/*/*.php", "/path/subpath/subpath/subpath/file.php") - shouldNotMatch(t, "/path/**/vendor/*.php", "/path/subpath/vendor/subpath/file.php") - shouldNotMatch(t, "/path/**/vendor/*.php", "/path/subpath/file.php") - shouldNotMatch(t, "/path/**/vendor/**/*.php", "/path/subpath/file.php") - shouldNotMatch(t, "/path/**/vendor/**/*.txt", "/path/subpath/vendor/subpath/file.php") - shouldNotMatch(t, 
"/path/**/vendor/**/*.php", "/path/subpath/subpath/subpath/file.php") - shouldNotMatch(t, "/path/**/vendor/*/*.php", "/path/subpath/vendor/subpath/subpath/file.php") - shouldNotMatch(t, "/path*/path*", "/path1/path1/file.php") -} - -func TestValidExtendedPatterns(t *testing.T) { - shouldMatch(t, "/path/*.{php}", "/path/file.php") - shouldMatch(t, "/path/*.{php,twig}", "/path/file.php") - shouldMatch(t, "/path/*.{php,twig}", "/path/file.twig") - shouldMatch(t, "/path/**/{file.php,file.twig}", "/path/subpath/file.twig") - shouldMatch(t, "/path/{dir1,dir2}/file.php", "/path/dir1/file.php") - shouldMatch(t, "/path/{dir1,dir2}/file.php", "/path/dir2/file.php") - shouldMatch(t, "/app/{app,config,resources}/**/*.php", "/app/app/subpath/file.php") - shouldMatch(t, "/app/{app,config,resources}/**/*.php", "/app/config/subpath/file.php") -} - -func TestInValidExtendedPatterns(t *testing.T) { - shouldNotMatch(t, "/path/*.{php}", "/path/file.txt") - shouldNotMatch(t, "/path/*.{php,twig}", "/path/file.txt") - shouldNotMatch(t, "/path/{file.php,file.twig}", "/path/file.txt") - shouldNotMatch(t, "/path/{dir1,dir2}/file.php", "/path/dir3/file.php") - shouldNotMatch(t, "/path/{dir1,dir2}/**/*.php", "/path/dir1/subpath/file.txt") -} - -func TestAnAssociatedEventTriggersTheWatcher(t *testing.T) { - watchPattern, err := parseFilePattern("/**/*.php") - assert.NoError(t, err) - watchPattern.trigger = make(chan string) - - go handleWatcherEvent(watchPattern, "/path/temorary_file", "/path/file.php", 0, 0) - - var path string - select { - case path = <-watchPattern.trigger: - assert.Equal(t, "/path/file.php", path, "should be associated file path") - case <-time.After(2 * time.Second): - assert.Fail(t, "associated watchPattern did not trigger after 2s") - } -} - -func relativeDir(t *testing.T, relativePath string) string { - dir, err := filepath.Abs("./" + relativePath) - assert.NoError(t, err) - return dir -} - -func hasDir(t *testing.T, pattern string, dir string) { - watchPattern, err := 
parseFilePattern(pattern) - assert.NoError(t, err) - assert.Equal(t, dir, watchPattern.dir) -} - -func shouldMatch(t *testing.T, pattern string, fileName string) { - watchPattern, err := parseFilePattern(pattern) - assert.NoError(t, err) - assert.True(t, watchPattern.allowReload(fileName, 0, 0)) -} - -func shouldNotMatch(t *testing.T, pattern string, fileName string) { - watchPattern, err := parseFilePattern(pattern) - assert.NoError(t, err) - assert.False(t, watchPattern.allowReload(fileName, 0, 0)) -} diff --git a/internal/watcher/watcher-skip.go b/internal/watcher/watcher-skip.go deleted file mode 100644 index f801e3e1..00000000 --- a/internal/watcher/watcher-skip.go +++ /dev/null @@ -1,17 +0,0 @@ -//go:build nowatcher - -package watcher - -import ( - "context" - "log/slog" -) - -func InitWatcher(ct context.Context, filePatterns []string, callback func(), logger *slog.Logger) error { - logger.Error("watcher support is not enabled") - - return nil -} - -func DrainWatcher() { -} diff --git a/internal/watcher/watcher.c b/internal/watcher/watcher.c deleted file mode 100644 index 8a38a869..00000000 --- a/internal/watcher/watcher.c +++ /dev/null @@ -1,26 +0,0 @@ -// clang-format off -//go:build !nowatcher -// clang-format on -#include "_cgo_export.h" -#include "wtr/watcher-c.h" - -void handle_event(struct wtr_watcher_event event, void *data) { - go_handle_file_watcher_event( - (char *)event.path_name, (char *)event.associated_path_name, - event.effect_type, event.path_type, (uintptr_t)data); -} - -uintptr_t start_new_watcher(char const *const path, uintptr_t data) { - void *watcher = wtr_watcher_open(path, handle_event, (void *)data); - if (watcher == NULL) { - return 0; - } - return (uintptr_t)watcher; -} - -int stop_watcher(uintptr_t watcher) { - if (!wtr_watcher_close((void *)watcher)) { - return 0; - } - return 1; -} diff --git a/internal/watcher/watcher.go b/internal/watcher/watcher.go index c1b87572..61b5fb81 100644 --- a/internal/watcher/watcher.go +++ 
b/internal/watcher/watcher.go @@ -2,55 +2,60 @@ package watcher -// #include -// #include -// #include "watcher.h" -import "C" import ( "context" "errors" "log/slog" - "runtime/cgo" - "strings" "sync" "sync/atomic" "time" - "unsafe" + + "github.com/e-dant/watcher/watcher-go" ) -type watcher struct { - sessions []C.uintptr_t - callback func() - trigger chan string - stop chan struct{} -} - -// duration to wait before triggering a reload after a file change -const debounceDuration = 150 * time.Millisecond - -// times to retry watching if the watcher was closed prematurely -const maxFailureCount = 5 -const failureResetDuration = 5 * time.Second - -var failureMu = sync.Mutex{} -var watcherIsActive = atomic.Bool{} +const ( + // duration to wait before triggering a reload after a file change + debounceDuration = 150 * time.Millisecond + // times to retry watching if the watcher was closed prematurely + maxFailureCount = 5 + failureResetDuration = 5 * time.Second +) var ( - ErrAlreadyStarted = errors.New("the watcher is already running") - ErrUnableToStartWatching = errors.New("unable to start the watcher") + ErrAlreadyStarted = errors.New("watcher is already running") + + failureMu sync.Mutex + watcherIsActive atomic.Bool // the currently active file watcher - activeWatcher *watcher + activeWatcher *globalWatcher // after stopping the watcher we will wait for eventual reloads to finish reloadWaitGroup sync.WaitGroup // we are passing the context from the main package to the watcher - ctx context.Context - // we are passing the logger from the main package to the watcher - logger *slog.Logger + globalCtx context.Context + // we are passing the globalLogger from the main package to the watcher + globalLogger *slog.Logger ) -func InitWatcher(ct context.Context, filePatterns []string, callback func(), slogger *slog.Logger) error { - if len(filePatterns) == 0 { +type PatternGroup struct { + Patterns []string + Callback func([]*watcher.Event) +} + +type eventHolder struct { + 
patternGroup *PatternGroup + event *watcher.Event +} + +type globalWatcher struct { + groups []*PatternGroup + watchers []*pattern + events chan eventHolder + stop chan struct{} +} + +func InitWatcher(ct context.Context, slogger *slog.Logger, groups []*PatternGroup) error { + if len(groups) == 0 { return nil } @@ -59,11 +64,22 @@ func InitWatcher(ct context.Context, filePatterns []string, callback func(), slo } watcherIsActive.Store(true) - ctx = ct - logger = slogger - activeWatcher = &watcher{callback: callback} + globalCtx = ct + globalLogger = slogger - if err := activeWatcher.startWatching(ctx, filePatterns); err != nil { + activeWatcher = &globalWatcher{groups: groups} + + for _, g := range groups { + if len(g.Patterns) == 0 { + continue + } + + for _, p := range g.Patterns { + activeWatcher.watchers = append(activeWatcher.watchers, &pattern{patternGroup: g, value: p}) + } + } + + if err := activeWatcher.startWatching(); err != nil { return err } @@ -77,8 +93,8 @@ func DrainWatcher() { watcherIsActive.Store(false) - if logger.Enabled(ctx, slog.LevelDebug) { - logger.LogAttrs(ctx, slog.LevelDebug, "stopping watcher") + if globalLogger.Enabled(globalCtx, slog.LevelDebug) { + globalLogger.LogAttrs(globalCtx, slog.LevelDebug, "stopping watcher") } activeWatcher.stopWatching() @@ -87,142 +103,119 @@ func DrainWatcher() { } // TODO: how to test this? 
-func retryWatching(watchPattern *watchPattern) { +func (p *pattern) retryWatching() { failureMu.Lock() defer failureMu.Unlock() - if watchPattern.failureCount >= maxFailureCount { - if logger.Enabled(ctx, slog.LevelWarn) { - logger.LogAttrs(ctx, slog.LevelWarn, "giving up watching", slog.String("dir", watchPattern.dir)) + + if p.failureCount >= maxFailureCount { + if globalLogger.Enabled(globalCtx, slog.LevelWarn) { + globalLogger.LogAttrs(globalCtx, slog.LevelWarn, "giving up watching", slog.String("pattern", p.value)) } return } - if logger.Enabled(ctx, slog.LevelInfo) { - logger.LogAttrs(ctx, slog.LevelInfo, "watcher was closed prematurely, retrying...", slog.String("dir", watchPattern.dir)) + if globalLogger.Enabled(globalCtx, slog.LevelInfo) { + globalLogger.LogAttrs(globalCtx, slog.LevelInfo, "watcher was closed prematurely, retrying...", slog.String("pattern", p.value)) } - watchPattern.failureCount++ - session, err := startSession(watchPattern) - if err != nil { - activeWatcher.sessions = append(activeWatcher.sessions, session) - } + p.failureCount++ + + p.startSession() // reset the failure-count if the watcher hasn't reached max failures after 5 seconds go func() { - time.Sleep(failureResetDuration * time.Second) + time.Sleep(failureResetDuration) + failureMu.Lock() - if watchPattern.failureCount < maxFailureCount { - watchPattern.failureCount = 0 + if p.failureCount < maxFailureCount { + p.failureCount = 0 } failureMu.Unlock() }() } -func (w *watcher) startWatching(ctx context.Context, filePatterns []string) error { - w.trigger = make(chan string) - w.stop = make(chan struct{}) - w.sessions = make([]C.uintptr_t, len(filePatterns)) - watchPatterns, err := parseFilePatterns(filePatterns) - if err != nil { +func (g *globalWatcher) startWatching() error { + g.events = make(chan eventHolder) + g.stop = make(chan struct{}) + + if err := g.parseFilePatterns(); err != nil { return err } - for i, watchPattern := range watchPatterns { - watchPattern.trigger = 
w.trigger - session, err := startSession(watchPattern) - if err != nil { - return err - } - w.sessions[i] = session + + for _, w := range g.watchers { + w.events = g.events + w.startSession() } - go listenForFileEvents(w.trigger, w.stop) + + go g.listenForFileEvents() + return nil } -func (w *watcher) stopWatching() { - close(w.stop) - for _, session := range w.sessions { - stopSession(session) - } -} - -func startSession(w *watchPattern) (C.uintptr_t, error) { - handle := cgo.NewHandle(w) - cDir := C.CString(w.dir) - defer C.free(unsafe.Pointer(cDir)) - watchSession := C.start_new_watcher(cDir, C.uintptr_t(handle)) - if watchSession != 0 { - if logger.Enabled(ctx, slog.LevelDebug) { - logger.LogAttrs(ctx, slog.LevelDebug, "watching", slog.String("dir", w.dir), slog.Any("patterns", w.patterns)) +func (g *globalWatcher) parseFilePatterns() error { + for _, w := range g.watchers { + if err := w.parse(); err != nil { + return err } - - return watchSession, nil } - if logger.Enabled(ctx, slog.LevelError) { - logger.LogAttrs(ctx, slog.LevelError, "couldn't start watching", slog.String("dir", w.dir)) - } - - return watchSession, ErrUnableToStartWatching + return nil } -func stopSession(session C.uintptr_t) { - success := C.stop_watcher(session) - if success == 0 && logger.Enabled(ctx, slog.LevelWarn) { - logger.LogAttrs(ctx, slog.LevelWarn, "couldn't close the watcher") +func (g *globalWatcher) stopWatching() { + close(g.stop) + for _, w := range g.watchers { + w.stop() } } -//export go_handle_file_watcher_event -func go_handle_file_watcher_event(path *C.char, associatedPath *C.char, eventType C.int, pathType C.int, handle C.uintptr_t) { - watchPattern := cgo.Handle(handle).Value().(*watchPattern) - handleWatcherEvent(watchPattern, C.GoString(path), C.GoString(associatedPath), int(eventType), int(pathType)) -} - -func handleWatcherEvent(watchPattern *watchPattern, path string, associatedPath string, eventType int, pathType int) { - // If the watcher prematurely sends the 
die@ event, retry watching - if pathType == 4 && strings.HasPrefix(path, "e/self/die@") && watcherIsActive.Load() { - retryWatching(watchPattern) - return - } - - if watchPattern.allowReload(path, eventType, pathType) { - watchPattern.trigger <- path - return - } - - // some editors create temporary files and never actually modify the original file - // so we need to also check the associated path of an event - // see https://github.com/php/frankenphp/issues/1375 - if associatedPath != "" && watchPattern.allowReload(associatedPath, eventType, pathType) { - watchPattern.trigger <- associatedPath - } -} - -func listenForFileEvents(triggerWatcher chan string, stopWatcher chan struct{}) { +func (g *globalWatcher) listenForFileEvents() { timer := time.NewTimer(debounceDuration) timer.Stop() - lastChangedFile := "" + + eventsPerGroup := make(map[*PatternGroup][]*watcher.Event, len(g.groups)) + defer timer.Stop() for { select { - case <-stopWatcher: - case lastChangedFile = <-triggerWatcher: + case <-g.stop: + return + case eh := <-g.events: timer.Reset(debounceDuration) + + eventsPerGroup[eh.patternGroup] = append(eventsPerGroup[eh.patternGroup], eh.event) case <-timer.C: timer.Stop() - if logger.Enabled(ctx, slog.LevelInfo) { - logger.LogAttrs(ctx, slog.LevelInfo, "filesystem change detected", slog.String("file", lastChangedFile)) + if globalLogger.Enabled(globalCtx, slog.LevelInfo) { + var events []*watcher.Event + for _, eventList := range eventsPerGroup { + events = append(events, eventList...) 
+ } + + globalLogger.LogAttrs(globalCtx, slog.LevelInfo, "filesystem changes detected", slog.Any("events", events)) } - scheduleReload() + g.scheduleReload(eventsPerGroup) + eventsPerGroup = make(map[*PatternGroup][]*watcher.Event, len(g.groups)) } } } -func scheduleReload() { +func (g *globalWatcher) scheduleReload(eventsPerGroup map[*PatternGroup][]*watcher.Event) { reloadWaitGroup.Add(1) - activeWatcher.callback() + + // Call callbacks in order + for _, g := range g.groups { + if len(g.Patterns) == 0 { + g.Callback(nil) + } + + if e, ok := eventsPerGroup[g]; ok { + g.Callback(e) + } + } + reloadWaitGroup.Done() } diff --git a/internal/watcher/watcher.h b/internal/watcher/watcher.h deleted file mode 100644 index d492cb81..00000000 --- a/internal/watcher/watcher.h +++ /dev/null @@ -1,6 +0,0 @@ -#include -#include - -uintptr_t start_new_watcher(char const *const path, uintptr_t data); - -int stop_watcher(uintptr_t watcher); diff --git a/mercure-skip.go b/mercure-skip.go index 5db69847..26691068 100644 --- a/mercure-skip.go +++ b/mercure-skip.go @@ -5,14 +5,14 @@ package frankenphp // #include // #include import "C" -import ( - "unsafe" -) type mercureContext struct { } //export go_mercure_publish -func go_mercure_publish(_ C.uintptr_t, _ *C.struct__zval_struct, _ unsafe.Pointer, _ bool, _, _ unsafe.Pointer, _ uint64) (*C.zend_string, C.short) { +func go_mercure_publish(threadIndex C.uintptr_t, topics *C.struct__zval_struct, data *C.zend_string, private bool, id, typ *C.zend_string, retry uint64) (generatedID *C.zend_string, error C.short) { return nil, 3 } + +func (w *worker) configureMercure(_ *workerOpt) { +} diff --git a/mercure.go b/mercure.go index 6c1d7075..737d0e10 100644 --- a/mercure.go +++ b/mercure.go @@ -38,6 +38,7 @@ func go_mercure_publish(threadIndex C.uintptr_t, topics *C.struct__zval_struct, Type: GoString(unsafe.Pointer(typ)), }, Private: private, + Debug: fc.logger.Enabled(ctx, slog.LevelDebug), } zvalType := C.zval_get_type(topics) @@ -71,6 
+72,14 @@ func go_mercure_publish(threadIndex C.uintptr_t, topics *C.struct__zval_struct, return (*C.zend_string)(PHPString(u.ID, false)), 0 } +func (w *worker) configureMercure(o *workerOpt) { + if o.mercureHub == nil { + return + } + + w.mercureHub = o.mercureHub +} + // WithMercureHub sets the mercure.Hub to use to publish updates func WithMercureHub(hub *mercure.Hub) RequestOption { return func(o *frankenPHPContext) error { @@ -79,3 +88,14 @@ func WithMercureHub(hub *mercure.Hub) RequestOption { return nil } } + +// WithWorkerMercureHub sets the mercure.Hub in the worker script and used to dispatch hot reloading-related mercure.Update. +func WithWorkerMercureHub(hub *mercure.Hub) WorkerOption { + return func(w *workerOpt) error { + w.mercureHub = hub + + w.requestOptions = append(w.requestOptions, WithMercureHub(hub)) + + return nil + } +} diff --git a/options.go b/options.go index b9751ad8..3b0ea139 100644 --- a/options.go +++ b/options.go @@ -20,6 +20,8 @@ type WorkerOption func(*workerOpt) error // // If you change this, also update the Caddy module and the documentation. 
type opt struct { + hotReloadOpt + ctx context.Context numThreads int maxThreads int @@ -31,6 +33,8 @@ type opt struct { } type workerOpt struct { + mercureContext + name string fileName string num int diff --git a/phpmainthread_test.go b/phpmainthread_test.go index 90aa2033..b777c339 100644 --- a/phpmainthread_test.go +++ b/phpmainthread_test.go @@ -93,10 +93,15 @@ func TestTransitionAThreadBetween2DifferentWorkers(t *testing.T) { // try all possible handler transitions // takes around 200ms and is supposed to force race conditions func TestTransitionThreadsWhileDoingRequests(t *testing.T) { + t.Cleanup(Shutdown) + + var ( + isDone atomic.Bool + wg sync.WaitGroup + ) + numThreads := 10 numRequestsPerThread := 100 - isDone := atomic.Bool{} - wg := sync.WaitGroup{} worker1Path := testDataPath + "/transition-worker-1.php" worker1Name := "worker-1" worker2Path := testDataPath + "/transition-worker-2.php" @@ -155,7 +160,6 @@ func TestTransitionThreadsWhileDoingRequests(t *testing.T) { // we are finished as soon as all 1000 requests are done wg.Wait() isDone.Store(true) - Shutdown() } func TestFinishBootingAWorkerScript(t *testing.T) { diff --git a/scaling_test.go b/scaling_test.go index da7a4d29..c0f848c4 100644 --- a/scaling_test.go +++ b/scaling_test.go @@ -11,6 +11,8 @@ import ( ) func TestScaleARegularThreadUpAndDown(t *testing.T) { + t.Cleanup(Shutdown) + assert.NoError(t, Init( WithNumThreads(1), WithMaxThreads(2), @@ -25,14 +27,14 @@ func TestScaleARegularThreadUpAndDown(t *testing.T) { assert.IsType(t, ®ularThread{}, autoScaledThread.handler) // on down-scale, the thread will be marked as inactive - setLongWaitTime(autoScaledThread) + setLongWaitTime(t, autoScaledThread) deactivateThreads() assert.IsType(t, &inactiveThread{}, autoScaledThread.handler) - - Shutdown() } func TestScaleAWorkerThreadUpAndDown(t *testing.T) { + t.Cleanup(Shutdown) + workerName := "worker1" workerPath := testDataPath + "/transition-worker-1.php" assert.NoError(t, Init( @@ -53,13 
+55,13 @@ func TestScaleAWorkerThreadUpAndDown(t *testing.T) { assert.Equal(t, state.Ready, autoScaledThread.state.Get()) // on down-scale, the thread will be marked as inactive - setLongWaitTime(autoScaledThread) + setLongWaitTime(t, autoScaledThread) deactivateThreads() assert.IsType(t, &inactiveThread{}, autoScaledThread.handler) - - Shutdown() } -func setLongWaitTime(thread *phpThread) { +func setLongWaitTime(t *testing.T, thread *phpThread) { + t.Helper() + thread.state.SetWaitTime(time.Now().Add(-time.Hour)) } diff --git a/watcher-skip.go b/watcher-skip.go new file mode 100644 index 00000000..c01cae22 --- /dev/null +++ b/watcher-skip.go @@ -0,0 +1,23 @@ +//go:build nowatcher + +package frankenphp + +import "errors" + +type hotReloadOpt struct { +} + +var errWatcherNotEnabled = errors.New("watcher support is not enabled") + +func initWatchers(o *opt) error { + for _, o := range o.workers { + if len(o.watch) != 0 { + return errWatcherNotEnabled + } + } + + return nil +} + +func drainWatchers() { +} diff --git a/watcher.go b/watcher.go new file mode 100644 index 00000000..eb2b09e2 --- /dev/null +++ b/watcher.go @@ -0,0 +1,47 @@ +//go:build !nowatcher + +package frankenphp + +import ( + "sync/atomic" + + "github.com/dunglas/frankenphp/internal/watcher" + watcherGo "github.com/e-dant/watcher/watcher-go" +) + +type hotReloadOpt struct { + hotReload []*watcher.PatternGroup +} + +var restartWorkers atomic.Bool + +func initWatchers(o *opt) error { + watchPatterns := make([]*watcher.PatternGroup, 0, len(o.hotReload)) + + for _, o := range o.workers { + if len(o.watch) == 0 { + continue + } + + watcherIsEnabled = true + watchPatterns = append(watchPatterns, &watcher.PatternGroup{Patterns: o.watch, Callback: func(_ []*watcherGo.Event) { + restartWorkers.Store(true) + }}) + } + + if watcherIsEnabled { + watchPatterns = append(watchPatterns, &watcher.PatternGroup{ + Callback: func(_ []*watcherGo.Event) { + if restartWorkers.Swap(false) { + RestartWorkers() + } + }, + }) + 
} + + return watcher.InitWatcher(globalCtx, globalLogger, append(watchPatterns, o.hotReload...)) +} + +func drainWatchers() { + watcher.DrainWatcher() +} diff --git a/watcher_test.go b/watcher_test.go index f3b2efc6..3e0d9d10 100644 --- a/watcher_test.go +++ b/watcher_test.go @@ -11,6 +11,7 @@ import ( "time" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) // we have to wait a few milliseconds for the watcher debounce to take effect @@ -41,6 +42,8 @@ func TestWorkersShouldNotReloadOnExcludingPattern(t *testing.T) { } func pollForWorkerReset(t *testing.T, handler func(http.ResponseWriter, *http.Request), limit int) bool { + t.Helper() + // first we make an initial request to start the request counter body, _ := testGet("http://example.com/worker-with-counter.php", handler, t) assert.Equal(t, "requests:1", body) @@ -54,18 +57,19 @@ func pollForWorkerReset(t *testing.T, handler func(http.ResponseWriter, *http.Re return true } } + return false } func updateTestFile(fileName string, content string, t *testing.T) { absFileName, err := filepath.Abs(fileName) - assert.NoError(t, err) + require.NoError(t, err) + dirName := filepath.Dir(absFileName) - if _, err := os.Stat(dirName); os.IsNotExist(err) { + if _, err = os.Stat(dirName); os.IsNotExist(err) { err = os.MkdirAll(dirName, 0700) - assert.NoError(t, err) } - bytes := []byte(content) - err = os.WriteFile(absFileName, bytes, 0644) - assert.NoError(t, err) + require.NoError(t, err) + + require.NoError(t, os.WriteFile(absFileName, []byte(content), 0644)) } diff --git a/worker.go b/worker.go index 4960ecc7..e2c54536 100644 --- a/worker.go +++ b/worker.go @@ -14,11 +14,12 @@ import ( "github.com/dunglas/frankenphp/internal/fastabs" "github.com/dunglas/frankenphp/internal/state" - "github.com/dunglas/frankenphp/internal/watcher" ) // represents a worker script and can have many threads assigned to it type worker struct { + mercureContext + name string fileName string num int @@ -37,25 +38,31 @@ 
type worker struct { var ( workers []*worker watcherIsEnabled bool - startupFailChan chan (error) + startupFailChan chan error ) func initWorkers(opt []workerOpt) error { + if len(opt) == 0 { + return nil + } + + var ( + workersReady sync.WaitGroup + totalThreadsToStart int + ) + workers = make([]*worker, 0, len(opt)) - directoriesToWatch := getDirectoriesToWatch(opt) - watcherIsEnabled = len(directoriesToWatch) > 0 - totalThreadsToStart := 0 for _, o := range opt { w, err := newWorker(o) if err != nil { return err } + totalThreadsToStart += w.num workers = append(workers, w) } - var workersReady sync.WaitGroup startupFailChan = make(chan error, totalThreadsToStart) for _, w := range workers { @@ -73,22 +80,13 @@ func initWorkers(opt []workerOpt) error { select { case err := <-startupFailChan: - // at least 1 worker has failed, shut down and return an error - Shutdown() + // at least 1 worker has failed, return an error return fmt.Errorf("failed to initialize workers: %w", err) default: // all workers started successfully startupFailChan = nil } - if !watcherIsEnabled { - return nil - } - - if err := watcher.InitWatcher(globalCtx, directoriesToWatch, RestartWorkers, globalLogger); err != nil { - return err - } - return nil } @@ -156,6 +154,8 @@ func newWorker(o workerOpt) (*worker, error) { onThreadShutdown: o.onThreadShutdown, } + w.configureMercure(&o) + w.requestOptions = append( w.requestOptions, WithRequestDocumentRoot(filepath.Dir(o.fileName), false), @@ -175,39 +175,43 @@ func DrainWorkers() { } func drainWorkerThreads() []*phpThread { - ready := sync.WaitGroup{} - drainedThreads := make([]*phpThread, 0) + var ( + ready sync.WaitGroup + drainedThreads []*phpThread + ) + for _, worker := range workers { worker.threadMutex.RLock() ready.Add(len(worker.threads)) + for _, thread := range worker.threads { if !thread.state.RequestSafeStateChange(state.Restarting) { ready.Done() + // no state change allowed == thread is shutting down - // we'll proceed to restart 
all other threads anyways + // we'll proceed to restart all other threads anyway continue } + close(thread.drainChan) drainedThreads = append(drainedThreads, thread) + go func(thread *phpThread) { thread.state.WaitFor(state.Yielding) ready.Done() }(thread) } + worker.threadMutex.RUnlock() } + ready.Wait() return drainedThreads } -func drainWatcher() { - if watcherIsEnabled { - watcher.DrainWatcher() - } -} - // RestartWorkers attempts to restart all workers gracefully +// All workers must be restarted at the same time to prevent issues with opcache resetting. func RestartWorkers() { // disallow scaling threads while restarting workers scalingMu.Lock() @@ -221,14 +225,6 @@ func RestartWorkers() { } } -func getDirectoriesToWatch(workerOpts []workerOpt) []string { - directoriesToWatch := []string{} - for _, w := range workerOpts { - directoriesToWatch = append(directoriesToWatch, w.watch...) - } - return directoriesToWatch -} - func (worker *worker) attachThread(thread *phpThread) { worker.threadMutex.Lock() worker.threads = append(worker.threads, thread) diff --git a/workerextension_test.go b/workerextension_test.go index b1c3dd0b..e861ec97 100644 --- a/workerextension_test.go +++ b/workerextension_test.go @@ -10,6 +10,8 @@ import ( ) func TestWorkersExtension(t *testing.T) { + t.Cleanup(Shutdown) + readyWorkers := 0 shutdownWorkers := 0 serverStarts := 0