diff --git a/.github/actions/setup_build_env/action.yml b/.github/actions/setup_build_env/action.yml index 773c0f9cb6d..d45668dd4b4 100644 --- a/.github/actions/setup_build_env/action.yml +++ b/.github/actions/setup_build_env/action.yml @@ -14,6 +14,10 @@ inputs: python-version: description: "Python version setup" required: true + node-version: + description: "Node.js version setup" + required: false + default: "22" run-uv-sync: description: "Whether to run uv sync on current dir" required: false @@ -37,7 +41,7 @@ runs: - name: Setup Node uses: actions/setup-node@v4 with: - node-version: 22 + node-version: ${{ inputs.node-version }} - name: Install Dependencies if: inputs.run-uv-sync == 'true' run: uv sync diff --git a/.github/workflows/performance.yml b/.github/workflows/performance.yml index e6cc29dd98a..815322a733b 100644 --- a/.github/workflows/performance.yml +++ b/.github/workflows/performance.yml @@ -44,3 +44,37 @@ jobs: with: mode: instrumentation run: uv run pytest -v tests/benchmarks --codspeed + + lighthouse: + name: Run Lighthouse benchmark + runs-on: ubuntu-22.04 + timeout-minutes: 30 + steps: + - uses: actions/checkout@v4 + with: + fetch-tags: true + fetch-depth: 0 + + - uses: ./.github/actions/setup_build_env + with: + python-version: "3.14" + node-version: "22" + run-uv-sync: true + + - name: Install playwright + run: uv run playwright install chromium --only-shell + + - name: Run Lighthouse benchmark + env: + REFLEX_RUN_LIGHTHOUSE: "1" + run: | + mkdir -p .pytest-tmp/lighthouse + uv run pytest tests/integration/test_lighthouse.py -q -s --tb=no --basetemp=.pytest-tmp/lighthouse + + - name: Upload Lighthouse artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: lighthouse-report + path: .pytest-tmp/lighthouse + if-no-files-found: ignore diff --git a/docs/getting_started/project-structure.md b/docs/getting_started/project-structure.md index b4e38f8a663..5fa1082b7aa 100644 --- a/docs/getting_started/project-structure.md +++ 
b/docs/getting_started/project-structure.md @@ -64,6 +64,8 @@ Initializing your project creates a directory with the same name as your app. Th Reflex generates a default app within the `{app_name}/{app_name}.py` file. You can modify this file to customize your app. +The starter page also includes explicit page metadata. As you customize the app, update the page `title` and `description` in `app.add_page(...)` or `@rx.page(...)` so your production pages describe your project clearly. + ## Python Project Files `pyproject.toml` defines your Python project metadata and dependencies. `uv add reflex` records the Reflex dependency there before you initialize the app. diff --git a/docs/hosting/self-hosting.md b/docs/hosting/self-hosting.md index cd793f30ade..a8e584009a1 100644 --- a/docs/hosting/self-hosting.md +++ b/docs/hosting/self-hosting.md @@ -43,6 +43,53 @@ the backend (event handlers) will be listening on port `8000`. Because the backend uses websockets, some reverse proxy servers, like [nginx](https://nginx.org/en/docs/http/websocket.html) or [apache](https://httpd.apache.org/docs/2.4/mod/mod_proxy.html#protoupgrade), must be configured to pass the `Upgrade` header to allow backend connectivity. ``` +## Pre-compressed Frontend Assets + +Production builds generate pre-compressed frontend assets so they can be served +without compressing responses on the fly. By default Reflex emits `gzip` +sidecars. You can also opt into Brotli and Zstandard in `rxconfig.py`: + +```python +config = rx.Config( + app_name="your_app_name", + frontend_compression_formats=["gzip", "brotli", "zstd"], +) +``` + +When Reflex serves the compiled frontend itself, it will negotiate +`Accept-Encoding` and serve matching sidecar files directly. If you would rather +have your reverse proxy handle compression itself, set +`frontend_compression_formats=[]` to disable build-time pre-compression. 
+ +If you are serving `.web/build/client` from a reverse proxy, enable its +precompressed-file support: + +### Caddy + +```caddy +example.com { + root * /srv/your-app/.web/build/client + try_files {path} /404.html + file_server { + precompressed zstd br gzip + } +} +``` + +### Nginx + +```nginx +location / { + root /srv/your-app/.web/build/client; + try_files $uri $uri/ /404.html; + gzip_static on; +} +``` + +Nginx supports prebuilt `gzip` files directly. If you also want Brotli or Zstd +at the proxy layer, use the corresponding Nginx modules or handle compression +at a CDN/load-balancer layer instead. + ## Exporting a Static Build Exporting a static build of the frontend allows the app to be served using a diff --git a/docs/pages/overview.md b/docs/pages/overview.md index 439151a5f24..2f852c51060 100644 --- a/docs/pages/overview.md +++ b/docs/pages/overview.md @@ -45,6 +45,26 @@ In this example we create three pages: # Video: Pages and URL Routes ``` +## Page Structure and Accessibility + +For better accessibility and Lighthouse scores, wrap your page content in an `rx.el.main` element. This provides the `
` HTML landmark that screen readers and search engines use to identify the primary content of the page. + +```python +def index(): + return rx.el.main( + navbar(), + rx.container( + rx.heading("Welcome"), + rx.text("Page content here."), + ), + footer(), + ) +``` + +```md alert +# Every page should have exactly one `
` landmark. Without it, accessibility tools like Lighthouse will flag the "Document does not have a main landmark" audit. +``` + ## Page Decorator You can also use the `@rx.page` decorator to add a page. @@ -207,6 +227,8 @@ You can add page metadata such as: {meta_data} ``` +For production apps, set `title` and `description` explicitly on each public page with `@rx.page(...)` or `app.add_page(...)`. Reflex will use what you provide there, so it is best to treat page metadata as part of the page definition rather than something to fill in later. + ## Getting the Current Page You can access the current page from the `router` attribute in any state. See the [router docs](/docs/utility_methods/router_attributes) for all available attributes. diff --git a/packages/reflex-base/src/reflex_base/.templates/apps/blank/code/blank.py b/packages/reflex-base/src/reflex_base/.templates/apps/blank/code/blank.py index 4d7059db546..a948369c43f 100644 --- a/packages/reflex-base/src/reflex_base/.templates/apps/blank/code/blank.py +++ b/packages/reflex-base/src/reflex_base/.templates/apps/blank/code/blank.py @@ -11,26 +11,37 @@ class State(rx.State): def index() -> rx.Component: # Welcome Page (Index) - return rx.container( - rx.color_mode.button(position="top-right"), - rx.vstack( - rx.heading("Welcome to Reflex!", size="9"), - rx.text( - "Get started by editing ", - rx.code(f"{config.app_name}/{config.app_name}.py"), - size="5", + return rx.el.main( + rx.container( + rx.color_mode.button(position="top-right"), + rx.vstack( + rx.heading("Welcome to Reflex!", size="9"), + rx.text( + "Get started by editing ", + rx.code(f"{config.app_name}/{config.app_name}.py"), + size="5", + ), + rx.button( + rx.link( + "Check out our docs!", + href="https://reflex.dev/docs/getting-started/introduction/", + is_external=True, + underline="none", + ), + as_child=True, + high_contrast=True, + ), + spacing="5", + justify="center", + min_height="85vh", ), - rx.link( - rx.button("Check out our docs!"), - 
href="https://reflex.dev/docs/getting-started/introduction/", - is_external=True, - ), - spacing="5", - justify="center", - min_height="85vh", ), ) app = rx.App() -app.add_page(index) +app.add_page( + index, + title="Welcome to Reflex", + description="A starter Reflex app.", +) diff --git a/packages/reflex-base/src/reflex_base/.templates/web/compress-static.js b/packages/reflex-base/src/reflex_base/.templates/web/compress-static.js new file mode 100644 index 00000000000..3cb6bf3bbe5 --- /dev/null +++ b/packages/reflex-base/src/reflex_base/.templates/web/compress-static.js @@ -0,0 +1,9 @@ +import { compressDirectory } from "./vite-plugin-compress.js"; + +const [directory, formatsArg = "[]"] = process.argv.slice(2); + +if (!directory) { + throw new Error("Missing static output directory for compression."); +} + +await compressDirectory(directory, JSON.parse(formatsArg)); diff --git a/packages/reflex-base/src/reflex_base/.templates/web/utils/helpers/upload.js b/packages/reflex-base/src/reflex_base/.templates/web/utils/helpers/upload.js index 6d3d146c6c5..dbfcea88718 100644 --- a/packages/reflex-base/src/reflex_base/.templates/web/utils/helpers/upload.js +++ b/packages/reflex-base/src/reflex_base/.templates/web/utils/helpers/upload.js @@ -1,4 +1,3 @@ -import JSON5 from "json5"; import env from "$/env.json"; /** @@ -45,7 +44,7 @@ export const uploadFiles = async ( // So only process _new_ chunks beyond resp_idx. 
chunks.slice(resp_idx).map((chunk_json) => { try { - const chunk = JSON5.parse(chunk_json); + const chunk = JSON.parse(chunk_json); event_callbacks.map((f, ix) => { f(chunk) .then(() => { diff --git a/packages/reflex-base/src/reflex_base/.templates/web/vite-plugin-compress.js b/packages/reflex-base/src/reflex_base/.templates/web/vite-plugin-compress.js new file mode 100644 index 00000000000..53b3bcc5b10 --- /dev/null +++ b/packages/reflex-base/src/reflex_base/.templates/web/vite-plugin-compress.js @@ -0,0 +1,139 @@ +/* vite-plugin-compress.js + * + * Generate pre-compressed build assets so they can be served directly by + * production static file servers and reverse proxies without on-the-fly + * compression. The default format is gzip, with optional brotli and zstd. + */ + +import * as zlib from "node:zlib"; +import { dirname } from "node:path"; +import { access, readFile, writeFile } from "node:fs/promises"; +import { promisify } from "node:util"; +import { + validateFormats, + outputDirectoryExists, + walkFiles, +} from "./vite-plugin-utils.js"; + +const gzipAsync = promisify(zlib.gzip); +const brotliAsync = + typeof zlib.brotliCompress === "function" + ? promisify(zlib.brotliCompress) + : null; +const zstdAsync = + typeof zlib.zstdCompress === "function" ? promisify(zlib.zstdCompress) : null; + +const COMPRESSIBLE_EXTENSIONS = /\.(js|css|html|json|svg|xml|txt|map|mjs)$/; + +// Only compress files above this size (bytes). Tiny assets rarely benefit, +// but HTML entrypoints are always compressed so their negotiated sidecars exist. +const MIN_SIZE = 256; + +const COMPRESSORS = { + gzip: { + extension: ".gz", + compress: (raw) => gzipAsync(raw, { level: 9 }), + }, + brotli: brotliAsync && { + extension: ".br", + compress: (raw) => + brotliAsync(raw, { + params: { + [zlib.constants.BROTLI_PARAM_QUALITY]: + zlib.constants.BROTLI_MAX_QUALITY ?? 
11, + }, + }), + }, + zstd: zstdAsync && { + extension: ".zst", + compress: (raw) => zstdAsync(raw), + }, +}; + +// Concurrency limit for parallel file compression. +const CONCURRENCY = 16; + +function ensureFormatsSupported(formats) { + const unavailableFormats = formats.filter( + (format) => !COMPRESSORS[format]?.compress, + ); + if (unavailableFormats.length > 0) { + throw new Error( + `The configured frontend compression formats are not supported by this Node.js runtime: ${unavailableFormats.join(", ")}`, + ); + } +} + +async function compressFile(filePath, formats) { + const pendingFormats = []; + for (const format of formats) { + const compressor = COMPRESSORS[format]; + try { + await access(filePath + compressor.extension); + } catch { + pendingFormats.push([format, compressor]); + } + } + + if (pendingFormats.length === 0) return; + + const raw = await readFile(filePath); + if (raw.length < MIN_SIZE && !filePath.endsWith(".html")) return; + + await Promise.all( + pendingFormats.map(([_format, compressor]) => { + return compressor + .compress(raw) + .then((compressed) => + writeFile(filePath + compressor.extension, compressed), + ); + }), + ); +} + +export async function compressDirectory(directory, formats = ["gzip"]) { + validateFormats(formats, COMPRESSORS, "frontend compression format"); + ensureFormatsSupported(formats); + + if (!(await outputDirectoryExists(directory))) { + return; + } + + const pending = []; + for await (const filePath of walkFiles(directory)) { + if (!COMPRESSIBLE_EXTENSIONS.test(filePath)) continue; + pending.push(filePath); + } + + for (let i = 0; i < pending.length; i += CONCURRENCY) { + await Promise.all( + pending + .slice(i, i + CONCURRENCY) + .map((file) => compressFile(file, formats)), + ); + } +} + +/** + * Vite plugin that generates pre-compressed files for eligible build assets. 
+ * @param {{ formats?: string[] }} [options] + * @returns {import('vite').Plugin} + */ +export default function compressPlugin(options = {}) { + const formats = options.formats ?? ["gzip"]; + validateFormats(formats, COMPRESSORS, "frontend compression format"); + + return { + name: "vite-plugin-compress", + apply: "build", + enforce: "post", + + async writeBundle(outputOptions) { + const outputDir = + outputOptions.dir ?? + (outputOptions.file ? dirname(outputOptions.file) : null); + if (!outputDir) return; + await compressDirectory(outputDir, formats); + }, + }; +} diff --git a/packages/reflex-base/src/reflex_base/.templates/web/vite-plugin-image-optimize.js b/packages/reflex-base/src/reflex_base/.templates/web/vite-plugin-image-optimize.js new file mode 100644 index 00000000000..955be3274c5 --- /dev/null +++ b/packages/reflex-base/src/reflex_base/.templates/web/vite-plugin-image-optimize.js @@ -0,0 +1,130 @@ +/* vite-plugin-image-optimize.js + * + * Generate optimized image format variants (WebP, AVIF) as sidecar files + * alongside originals during production builds. The server can then use + * Accept-header content negotiation to serve the best format to each client. + */ + +import { dirname, extname } from "node:path"; +import { readFile, writeFile } from "node:fs/promises"; +import { + validateFormats, + outputDirectoryExists, + walkFiles, +} from "./vite-plugin-utils.js"; + +const IMAGE_EXTENSIONS = /\.(png|jpe?g|gif|bmp|tiff?)$/i; + +// Skip images smaller than this — tiny icons/favicons rarely benefit. +const MIN_SIZE = 1024; + +// Limit parallel sharp operations to avoid memory pressure. +const CONCURRENCY = 8; + +const FORMAT_CONFIG = { + webp: { suffix: ".webp", sharpMethod: "webp" }, + avif: { suffix: ".avif", sharpMethod: "avif" }, +}; + +/** + * Process a single image file, generating optimized format variants. + * Never throws — errors for individual images are silently skipped. 
+ */ +async function optimizeImage(sharp, filePath, formats, quality) { + let raw; + try { + raw = await readFile(filePath); + } catch { + return; + } + if (raw.length < MIN_SIZE) return; + + const stem = filePath.replace(/\.[^.]+$/, ""); + + await Promise.all( + formats.map(async (format) => { + const config = FORMAT_CONFIG[format]; + const outputPath = stem + config.suffix; + + try { + const result = await sharp(raw) + [config.sharpMethod]({ quality }) + .toBuffer(); + + if (result.length < raw.length) { + await writeFile(outputPath, result); + } + } catch { + // Skip this format on error (e.g. unsupported input). + } + }), + ); +} + +/** + * Process all images in a directory tree, with bounded concurrency. + */ +async function optimizeDirectory(sharp, directory, formats, quality) { + if (!(await outputDirectoryExists(directory))) return; + + const pending = []; + for await (const filePath of walkFiles(directory)) { + if (!IMAGE_EXTENSIONS.test(filePath)) continue; + + // Don't re-encode files that are already in a target format. + const ext = extname(filePath).toLowerCase(); + if (formats.some((f) => ext === FORMAT_CONFIG[f].suffix)) continue; + + pending.push(filePath); + } + + // Process in batches to bound concurrency. + for (let i = 0; i < pending.length; i += CONCURRENCY) { + await Promise.all( + pending + .slice(i, i + CONCURRENCY) + .map((file) => optimizeImage(sharp, file, formats, quality)), + ); + } +} + +/** + * Vite plugin that generates optimized image format variants for build assets. + * @param {{ formats?: string[], quality?: number }} [options] + * @returns {import('vite').Plugin} + */ +export default function imageOptimizePlugin(options = {}) { + const formats = options.formats ?? ["webp", "avif"]; + validateFormats(formats, FORMAT_CONFIG, "image optimization format"); + const quality = options.quality ?? 
80; + + if (formats.length === 0) { + return { name: "vite-plugin-image-optimize", apply: "build" }; + } + + return { + name: "vite-plugin-image-optimize", + apply: "build", + enforce: "post", + + async writeBundle(outputOptions) { + let sharp; + try { + sharp = (await import("sharp")).default; + } catch { + console.warn( + "[vite-plugin-image-optimize] sharp is not available — skipping image optimization. " + + "Install it with: npm install -D sharp", + ); + return; + } + + const outputDir = + outputOptions.dir ?? + (outputOptions.file ? dirname(outputOptions.file) : null); + if (!outputDir) return; + + await optimizeDirectory(sharp, outputDir, formats, quality); + }, + }; +} diff --git a/packages/reflex-base/src/reflex_base/.templates/web/vite-plugin-utils.js b/packages/reflex-base/src/reflex_base/.templates/web/vite-plugin-utils.js new file mode 100644 index 00000000000..becadb79930 --- /dev/null +++ b/packages/reflex-base/src/reflex_base/.templates/web/vite-plugin-utils.js @@ -0,0 +1,35 @@ +/* vite-plugin-utils.js — Shared utilities for Reflex Vite plugins. */ + +import { join } from "node:path"; +import { readdir, stat } from "node:fs/promises"; + +export async function* walkFiles(directory) { + for (const entry of await readdir(directory, { withFileTypes: true })) { + const entryPath = join(directory, entry.name); + if (entry.isDirectory()) { + yield* walkFiles(entryPath); + } else if (entry.isFile()) { + yield entryPath; + } + } +} + +export async function outputDirectoryExists(dir) { + return Boolean( + await stat(dir).catch((error) => + error?.code === "ENOENT" ? null : Promise.reject(error), + ), + ); +} + +/** + * Validate format names against a registry. Config already normalizes — + * this just guards against typos in direct callers of exported functions. 
+ */ +export function validateFormats(formats, registry, label) { + for (const name of formats) { + if (!(name in registry)) { + throw new Error(`Unsupported ${label} "${name}".`); + } + } +} diff --git a/packages/reflex-base/src/reflex_base/compiler/templates.py b/packages/reflex-base/src/reflex_base/compiler/templates.py index b4056227987..5727a200ee0 100644 --- a/packages/reflex-base/src/reflex_base/compiler/templates.py +++ b/packages/reflex-base/src/reflex_base/compiler/templates.py @@ -161,12 +161,79 @@ def document_root_template(*, imports: list[_ImportDict], document: dict[str, An }}""" +def _normalize_window_lib_alias(lib: str) -> str: + """Produce a safe JS identifier for a library path. + + Args: + lib: The library path to normalize. + + Returns: + A JS-safe identifier derived from the library path. + """ + return ( + lib + .replace("$/", "") + .replace("@", "") + .replace("/", "_") + .replace("-", "_") + .replace(".", "_") + ) + + +def _render_window_reflex_block( + window_library_imports: dict[str, set[str] | None], +) -> tuple[str, str]: + """Render the extra imports + useEffect block for window.__reflex. + + External libraries (``@radix-ui/themes`` etc.) use named imports derived + from the app's actual usage so Rolldown can tree-shake unused exports; + a star import would pin the library's entire surface onto the critical + path. Internal ``$/utils/*`` modules still use star imports since their + surface is small and Reflex-controlled. + + Args: + window_library_imports: Mapping from library path to the set of + named exports to expose (external libs) or ``None`` (internal + libs, star import). + + Returns: + A tuple of ``(import_block, useEffect_body)``. Both are empty when + no dynamic components are in play. 
+ """ + if not window_library_imports: + return "", "" + import_lines: list[str] = [] + entries: list[str] = [] + for lib, names in window_library_imports.items(): + alias = f"__reflex_{_normalize_window_lib_alias(lib)}" + if names is None: + import_lines.append(f'import * as {alias} from "{lib}";') + entries.append(f' "{lib}": {alias},') + else: + sorted_names = sorted(names) + specs = ", ".join(f"{n} as {alias}_{n}" for n in sorted_names) + import_lines.append(f'import {{ {specs} }} from "{lib}";') + obj_entries = ", ".join(f"{n}: {alias}_{n}" for n in sorted_names) + entries.append(f' "{lib}": {{ {obj_entries} }},') + if not entries: + return "", "" + import_block = "\n".join(import_lines) + effect = ( + " useEffect(() => {\n" + ' window["__reflex"] = {\n' + f"{chr(10).join(entries)}\n" + " };\n" + " }, []);\n" + ) + return import_block, effect + + def app_root_template( *, imports: list[_ImportDict], custom_codes: Iterable[str], hooks: dict[str, VarData | None], - window_libraries: list[tuple[str, str]], + window_library_imports: dict[str, set[str] | None], render: dict[str, Any], dynamic_imports: set[str], ): @@ -176,7 +243,8 @@ def app_root_template( imports: The list of import statements. custom_codes: The set of custom code snippets. hooks: The dictionary of hooks. - window_libraries: The list of window libraries. + window_library_imports: Per-library named-export surface for + ``window.__reflex`` (see ``collect_window_library_imports``). render: The dictionary of render functions. dynamic_imports: The set of dynamic imports. 
@@ -188,14 +256,9 @@ def app_root_template( custom_code_str = "\n".join(custom_codes) - import_window_libraries = "\n".join([ - f'import * as {lib_alias} from "{lib_path}";' - for lib_alias, lib_path in window_libraries - ]) - - window_imports_str = "\n".join([ - f' "{lib_path}": {lib_alias},' for lib_alias, lib_path in window_libraries - ]) + window_imports_block, window_reflex_effect = _render_window_reflex_block( + window_library_imports + ) return f""" {imports_str} @@ -204,7 +267,7 @@ def app_root_template( import {{ ThemeProvider }} from '$/utils/react-theme'; import {{ Layout as AppLayout }} from './_document'; import {{ Outlet }} from 'react-router'; -{import_window_libraries} +{window_imports_block} {custom_code_str} @@ -215,14 +278,7 @@ def app_root_template( export function Layout({{children}}) {{ - useEffect(() => {{ - // Make contexts and state objects available globally for dynamic eval'd components - let windowImports = {{ - {window_imports_str} - }}; - window["__reflex"] = windowImports; - }}, []); - +{window_reflex_effect} return jsx(AppLayout, {{}}, jsx(ThemeProvider, {{defaultTheme: defaultColorMode, attribute: "class"}}, jsx(StateProvider, {{}}, @@ -502,6 +558,8 @@ def vite_config_template( experimental_hmr: bool, sourcemap: bool | Literal["inline", "hidden"], allowed_hosts: bool | list[str] = False, + compression_formats: list[str] | None = None, + image_formats: list[str] | None = None, ): """Template for vite.config.js. @@ -512,6 +570,8 @@ def vite_config_template( experimental_hmr: Whether to enable experimental HMR features. sourcemap: The sourcemap configuration. allowed_hosts: Allow all hosts (True), specific hosts (list of strings), or only localhost (False). + compression_formats: Build-time pre-compression formats to emit. + image_formats: Optimized image formats to generate (e.g. ["webp", "avif"]). Returns: Rendered vite.config.js content as string. 
@@ -526,6 +586,8 @@ def vite_config_template( import {{ reactRouter }} from "@react-router/dev/vite"; import {{ defineConfig }} from "vite"; import safariCacheBustPlugin from "./vite-plugin-safari-cachebust"; +import imageOptimizePlugin from "./vite-plugin-image-optimize"; +import compressPlugin from "./vite-plugin-compress"; // Ensure that bun always uses the react-dom/server.node functions. function alwaysUseReactDomServerNode() {{ @@ -567,8 +629,11 @@ def vite_config_template( alwaysUseReactDomServerNode(), reactRouter(), safariCacheBustPlugin(), + imageOptimizePlugin({{ formats: {json.dumps(image_formats if image_formats is not None else ["webp", "avif"])}, quality: 80 }}), + compressPlugin({{ formats: {json.dumps(compression_formats if compression_formats is not None else ["gzip"])} }}), ].concat({"[fullReload()]" if force_full_reload else "[]"}), build: {{ + target: "es2022", sourcemap: {"true" if sourcemap is True else "false" if sourcemap is False else repr(sourcemap)}, rollupOptions: {{ onwarn(warning, warn) {{ @@ -583,6 +648,30 @@ def vite_config_template( test: /env.json/, name: "reflex-env", }}, + {{ + test: /node_modules\/socket\.io|node_modules\/engine\.io/, + name: "socket-io", + }}, + {{ + test: /node_modules\/@mantine/, + name: "mantine", + }}, + {{ + test: /node_modules\/lucide-react/, + name: "lucide-icons", + }}, + {{ + test: /node_modules\/react-helmet/, + name: "react-helmet", + }}, + {{ + test: /node_modules\/recharts|node_modules\/d3-/, + name: "recharts", + }}, + {{ + test: /node_modules\/@radix-ui\/themes/, + name: "radix-themes", + }}, ], }}, }}, diff --git a/packages/reflex-base/src/reflex_base/components/dynamic.py b/packages/reflex-base/src/reflex_base/components/dynamic.py index 6c2100a40e8..a9a156127ca 100644 --- a/packages/reflex-base/src/reflex_base/components/dynamic.py +++ b/packages/reflex-base/src/reflex_base/components/dynamic.py @@ -36,6 +36,17 @@ def get_cdn_url(lib: str) -> str: ] +# Captured during Component serialization 
so ``collect_window_library_imports`` +# can expose tags reachable only through eval'd code on ``window.__reflex``; +# without this, ``evalReactComponent`` would fail to resolve them. +dynamic_component_imports: dict[str, set[imports.ImportVar]] = {} + + +def reset_dynamic_component_imports() -> None: + """Clear the captured dynamic-component import set.""" + dynamic_component_imports.clear() + + def bundle_library(component: Union["Component", str]): """Bundle a library with the component. @@ -98,6 +109,11 @@ def make_component(component: Component) -> str: component_imports = component._get_all_imports() compiler._apply_common_imports(component_imports) + for lib, ivs in component_imports.items(): + named = {iv for iv in ivs if iv.tag and not iv.is_default} + if named: + dynamic_component_imports.setdefault(lib, set()).update(named) + imports = {} for lib, names in component_imports.items(): formatted_lib_name = format_library_name(lib) diff --git a/packages/reflex-base/src/reflex_base/config.py b/packages/reflex-base/src/reflex_base/config.py index a3838af9b22..c80103afae7 100644 --- a/packages/reflex-base/src/reflex_base/config.py +++ b/packages/reflex-base/src/reflex_base/config.py @@ -167,6 +167,8 @@ class BaseConfig: cors_allowed_origins: Comma separated list of origins that are allowed to connect to the backend API. vite_allowed_hosts: Allowed hosts for the Vite dev server. Set to True to allow all hosts, or provide a list of hostnames (e.g. ["myservice.local"]) to allow specific ones. Prevents 403 errors in Docker, Codespaces, reverse proxies, etc. react_strict_mode: Whether to use React strict mode. + frontend_compression_formats: Pre-compressed frontend asset formats to generate for production builds. Supported values are "gzip", "brotli", and "zstd". Use an empty list to disable build-time pre-compression. + frontend_image_formats: Optimized image formats to generate as sidecar files alongside originals during production builds. 
Supported values are "webp" and "avif". The server negotiates the ``Accept`` header and serves the best variant. Use an empty list to disable. frontend_packages: Additional frontend packages to install. state_manager_mode: Indicate which type of state manager to use. redis_lock_expiration: Maximum expiration lock time for redis state manager. @@ -221,6 +223,16 @@ class BaseConfig: react_strict_mode: bool = True + frontend_compression_formats: Annotated[ + list[str], + SequenceOptions(delimiter=",", strip=True), + ] = dataclasses.field(default_factory=lambda: ["gzip"]) + + frontend_image_formats: Annotated[ + list[str], + SequenceOptions(delimiter=",", strip=True), + ] = dataclasses.field(default_factory=lambda: ["webp", "avif"]) + frontend_packages: list[str] = dataclasses.field(default_factory=list) state_manager_mode: constants.StateManagerMode = constants.StateManagerMode.DISK @@ -305,7 +317,7 @@ class Config(BaseConfig): - **App Settings**: `app_name`, `loglevel`, `telemetry_enabled` - **Server**: `frontend_port`, `backend_port`, `api_url`, `cors_allowed_origins` - **Database**: `db_url`, `async_db_url`, `redis_url` - - **Frontend**: `frontend_packages`, `react_strict_mode` + - **Frontend**: `frontend_packages`, `react_strict_mode`, `frontend_compression_formats`, `frontend_image_formats` - **State Management**: `state_manager_mode`, `state_auto_setters` - **Plugins**: `plugins`, `disable_plugins` @@ -345,6 +357,9 @@ def _post_init(self, **kwargs): for key, env_value in env_kwargs.items(): setattr(self, key, env_value) + self._normalize_frontend_compression_formats() + self._normalize_frontend_image_formats() + # Normalize disable_plugins: convert strings and Plugin subclasses to instances. 
self._normalize_disable_plugins() @@ -415,6 +430,60 @@ def _normalize_disable_plugins(self): ) self.disable_plugins = normalized + @staticmethod + def _normalize_format_list( + formats: list[str], + supported: set[str], + config_key: str, + ) -> list[str]: + """Normalize, deduplicate, and validate a list of format names. + + Args: + formats: The raw format names from config. + supported: Set of valid format names. + config_key: Config field name for error messages. + + Returns: + Normalized list of valid format names. + + Raises: + ConfigError: If an unsupported format is found. + """ + normalized: list[str] = [] + seen: set[str] = set() + + for format_name in formats: + normalized_name = format_name.strip().lower() + if not normalized_name or normalized_name in seen: + continue + if normalized_name not in supported: + supported_str = ", ".join(sorted(supported)) + msg = ( + f"{config_key} contains unsupported format " + f"{format_name!r}. Expected one of: {supported_str}." + ) + raise ConfigError(msg) + normalized.append(normalized_name) + seen.add(normalized_name) + + return normalized + + def _normalize_frontend_compression_formats(self): + """Normalize and validate configured frontend compression formats.""" + self.frontend_compression_formats = self._normalize_format_list( + self.frontend_compression_formats, + {"brotli", "gzip", "zstd"}, + "frontend_compression_formats", + ) + + def _normalize_frontend_image_formats(self): + """Normalize and validate configured frontend image formats.""" + self.frontend_image_formats = self._normalize_format_list( + self.frontend_image_formats, + {"avif", "webp"}, + "frontend_image_formats", + ) + def _add_builtin_plugins(self): """Add the builtin plugins to the config.""" for plugin in _PLUGINS_ENABLED_BY_DEFAULT: diff --git a/packages/reflex-base/src/reflex_base/constants/installer.py b/packages/reflex-base/src/reflex_base/constants/installer.py index 1578ace7f2c..9732cc3f5c1 100644 --- 
a/packages/reflex-base/src/reflex_base/constants/installer.py +++ b/packages/reflex-base/src/reflex_base/constants/installer.py @@ -139,6 +139,7 @@ def DEPENDENCIES(cls) -> dict[str, str]: "autoprefixer": "10.4.27", "postcss": "8.5.8", "postcss-import": "16.1.1", + "sharp": "0.34.5", "@react-router/dev": _react_router_version, "@react-router/fs-routes": _react_router_version, "vite": "8.0.0", diff --git a/packages/reflex-base/src/reflex_base/plugins/base.py b/packages/reflex-base/src/reflex_base/plugins/base.py index 52dfa8d7805..b38f560e71b 100644 --- a/packages/reflex-base/src/reflex_base/plugins/base.py +++ b/packages/reflex-base/src/reflex_base/plugins/base.py @@ -8,6 +8,7 @@ if TYPE_CHECKING: from reflex.app import App, UnevaluatedPage + from reflex_base.components.component import BaseComponent class CommonContext(TypedDict): @@ -42,6 +43,7 @@ class PreCompileContext(CommonContext): add_save_task: AddTaskProtocol add_modify_task: Callable[[str, Callable[[str], str]], None] unevaluated_pages: Sequence["UnevaluatedPage"] + theme_roots: Sequence["BaseComponent | None"] class PostCompileContext(CommonContext): diff --git a/packages/reflex-base/src/reflex_base/plugins/shared_tailwind.py b/packages/reflex-base/src/reflex_base/plugins/shared_tailwind.py index 62180093ee6..1c915228599 100644 --- a/packages/reflex-base/src/reflex_base/plugins/shared_tailwind.py +++ b/packages/reflex-base/src/reflex_base/plugins/shared_tailwind.py @@ -1,6 +1,7 @@ """Tailwind CSS configuration types for Reflex plugins.""" import dataclasses +import re from collections.abc import Mapping from copy import deepcopy from typing import Any, Literal, TypedDict @@ -9,6 +10,27 @@ from .base import Plugin as PluginBase +_RADIX_IMPORT_RE = re.compile( + r"^@import (?:url\(['\"]|['\"])@radix-ui/themes/[^'\"]+['\"](?:\))?(?:\s+layer\(\w+\))?;\s*\n?", + re.MULTILINE, +) + + +def strip_radix_theme_imports(css: str) -> tuple[str, int]: + """Remove every Radix Themes @import line from a stylesheet. 
+ + Handles both the monolithic ``styles.css`` and the granular per-token + imports emitted by the compiler. + + Args: + css: The stylesheet content. + + Returns: + The stripped content and the number of imports removed. + """ + return _RADIX_IMPORT_RE.subn("", css) + + TailwindPluginImport = TypedDict( "TailwindPluginImport", { diff --git a/packages/reflex-base/src/reflex_base/plugins/tailwind_v3.py b/packages/reflex-base/src/reflex_base/plugins/tailwind_v3.py index d67264fc0e3..7d72486a829 100644 --- a/packages/reflex-base/src/reflex_base/plugins/tailwind_v3.py +++ b/packages/reflex-base/src/reflex_base/plugins/tailwind_v3.py @@ -1,17 +1,23 @@ """Base class for all plugins.""" import dataclasses +from collections.abc import Sequence from pathlib import Path from types import SimpleNamespace +from typing import TYPE_CHECKING from reflex_base.constants.base import Dirs from reflex_base.constants.compiler import Ext, PageNames from reflex_base.plugins.shared_tailwind import ( TailwindConfig, TailwindPlugin, + strip_radix_theme_imports, tailwind_config_js_template, ) +if TYPE_CHECKING: + from reflex_base.components.component import BaseComponent + class Constants(SimpleNamespace): """Tailwind constants.""" @@ -29,7 +35,7 @@ class Constants(SimpleNamespace): ROOT_STYLE_CONTENT = """ @import "tailwindcss/base"; -@import url('{radix_url}'); +{radix_imports} @tailwind components; @tailwind utilities; @@ -54,18 +60,28 @@ def compile_config(config: TailwindConfig): ) -def compile_root_style(): +def compile_root_style( + theme_roots: Sequence["BaseComponent | None"] | None = None, +): """Compile the Tailwind root style. + Args: + theme_roots: Component roots used to detect which Radix color scales are + actually referenced so only those CSS files are imported. + Returns: The compiled Tailwind root style. 
""" - from reflex.compiler.compiler import RADIX_THEMES_STYLESHEET + from reflex.compiler.compiler import get_radix_themes_stylesheets + radix_imports = "\n".join( + f"@import url('{sheet}');" + for sheet in get_radix_themes_stylesheets(theme_roots) + ) return str( Path(Dirs.STYLES) / Constants.ROOT_STYLE_PATH ), Constants.ROOT_STYLE_CONTENT.format( - radix_url=RADIX_THEMES_STYLESHEET, + radix_imports=radix_imports, ) @@ -121,20 +137,17 @@ def add_tailwind_to_css_file(css_file_content: str) -> str: Returns: The modified css file content. """ - from reflex.compiler.compiler import RADIX_THEMES_STYLESHEET - if Constants.TAILWIND_CSS.splitlines()[0] in css_file_content: return css_file_content - if RADIX_THEMES_STYLESHEET not in css_file_content: + + stripped, count = strip_radix_theme_imports(css_file_content) + if count == 0: print( # noqa: T201 - f"Could not find line with '{RADIX_THEMES_STYLESHEET}' in {Dirs.STYLES}. " + f"Could not find any '@radix-ui/themes' import in {Dirs.STYLES}. " "Please make sure the file exists and is valid." ) return css_file_content - return css_file_content.replace( - f"@import url('{RADIX_THEMES_STYLESHEET}');", - Constants.TAILWIND_CSS, - ) + return stripped.rstrip() + "\n" + Constants.TAILWIND_CSS + "\n" @dataclasses.dataclass @@ -162,7 +175,7 @@ def pre_compile(self, **context): context: The context for the plugin. 
""" context["add_save_task"](compile_config, self.get_unversioned_config()) - context["add_save_task"](compile_root_style) + context["add_save_task"](compile_root_style, context.get("theme_roots")) context["add_modify_task"](Dirs.POSTCSS_JS, add_tailwind_to_postcss_config) context["add_modify_task"]( str(Path(Dirs.STYLES) / (PageNames.STYLESHEET_ROOT + Ext.CSS)), diff --git a/packages/reflex-base/src/reflex_base/plugins/tailwind_v4.py b/packages/reflex-base/src/reflex_base/plugins/tailwind_v4.py index 4ae637752a1..b9aee70b05b 100644 --- a/packages/reflex-base/src/reflex_base/plugins/tailwind_v4.py +++ b/packages/reflex-base/src/reflex_base/plugins/tailwind_v4.py @@ -1,17 +1,23 @@ """Base class for all plugins.""" import dataclasses +from collections.abc import Sequence from pathlib import Path from types import SimpleNamespace +from typing import TYPE_CHECKING from reflex_base.constants.base import Dirs from reflex_base.constants.compiler import Ext, PageNames from reflex_base.plugins.shared_tailwind import ( TailwindConfig, TailwindPlugin, + strip_radix_theme_imports, tailwind_config_js_template, ) +if TYPE_CHECKING: + from reflex_base.components.component import BaseComponent + class Constants(SimpleNamespace): """Tailwind constants.""" @@ -29,7 +35,7 @@ class Constants(SimpleNamespace): ROOT_STYLE_CONTENT = """@layer theme, base, components, utilities; @import "tailwindcss/theme.css" layer(theme); @import "tailwindcss/preflight.css" layer(base); -@import "{radix_url}" layer(components); +{radix_imports} @import "tailwindcss/utilities.css" layer(utilities); @config "../tailwind.config.js"; """ @@ -53,18 +59,28 @@ def compile_config(config: TailwindConfig): ) -def compile_root_style(): +def compile_root_style( + theme_roots: Sequence["BaseComponent | None"] | None = None, +): """Compile the Tailwind root style. + Args: + theme_roots: Component roots used to detect which Radix color scales are + actually referenced so only those CSS files are imported. 
+ Returns: The compiled Tailwind root style. """ - from reflex.compiler.compiler import RADIX_THEMES_STYLESHEET + from reflex.compiler.compiler import get_radix_themes_stylesheets + radix_imports = "\n".join( + f'@import "{sheet}" layer(components);' + for sheet in get_radix_themes_stylesheets(theme_roots) + ) return str( Path(Dirs.STYLES) / Constants.ROOT_STYLE_PATH ), Constants.ROOT_STYLE_CONTENT.format( - radix_url=RADIX_THEMES_STYLESHEET, + radix_imports=radix_imports, ) @@ -124,20 +140,17 @@ def add_tailwind_to_css_file(css_file_content: str) -> str: Returns: The modified css file content. """ - from reflex.compiler.compiler import RADIX_THEMES_STYLESHEET - if Constants.TAILWIND_CSS.splitlines()[0] in css_file_content: return css_file_content - if RADIX_THEMES_STYLESHEET not in css_file_content: + + stripped, count = strip_radix_theme_imports(css_file_content) + if count == 0: print( # noqa: T201 - f"Could not find line with '{RADIX_THEMES_STYLESHEET}' in {Dirs.STYLES}. " + f"Could not find any '@radix-ui/themes' import in {Dirs.STYLES}. " "Please make sure the file exists and is valid." ) return css_file_content - return css_file_content.replace( - f"@import url('{RADIX_THEMES_STYLESHEET}');", - Constants.TAILWIND_CSS, - ) + return stripped.rstrip() + "\n" + Constants.TAILWIND_CSS + "\n" @dataclasses.dataclass @@ -166,7 +179,7 @@ def pre_compile(self, **context): context: The context for the plugin. 
""" context["add_save_task"](compile_config, self.get_unversioned_config()) - context["add_save_task"](compile_root_style) + context["add_save_task"](compile_root_style, context.get("theme_roots")) context["add_modify_task"](Dirs.POSTCSS_JS, add_tailwind_to_postcss_config) context["add_modify_task"]( str(Path(Dirs.STYLES) / (PageNames.STYLESHEET_ROOT + Ext.CSS)), diff --git a/packages/reflex-components-core/src/reflex_components_core/core/sticky.py b/packages/reflex-components-core/src/reflex_components_core/core/sticky.py index 3881d77e0dd..c21b35874c0 100644 --- a/packages/reflex-components-core/src/reflex_components_core/core/sticky.py +++ b/packages/reflex-components-core/src/reflex_components_core/core/sticky.py @@ -84,6 +84,8 @@ def create(cls): desktop_only(StickyLabel.create()), href="https://reflex.dev", target="_blank", + aria_label="Built with Reflex", + title="Built with Reflex", width="auto", padding="0.375rem", align="center", diff --git a/packages/reflex-components-radix/src/reflex_components_radix/themes/color_mode.py b/packages/reflex-components-radix/src/reflex_components_radix/themes/color_mode.py index c0d664796ea..575189f8f10 100644 --- a/packages/reflex-components-radix/src/reflex_components_radix/themes/color_mode.py +++ b/packages/reflex-components-radix/src/reflex_components_radix/themes/color_mode.py @@ -147,6 +147,8 @@ def create( props.setdefault("background", "transparent") props.setdefault("color", "inherit") props.setdefault("z_index", "20") + props.setdefault("aria_label", "Toggle color mode") + props.setdefault("title", "Toggle color mode") props.setdefault(":hover", {"cursor": "pointer"}) if allow_system: diff --git a/reflex/app.py b/reflex/app.py index bdbb90bfe40..010b5bad81a 100644 --- a/reflex/app.py +++ b/reflex/app.py @@ -1149,8 +1149,11 @@ def _compile( ReflexRuntimeError: When any page uses state, but no rx.State subclass is defined. FileNotFoundError: When a plugin requires a file that does not exist. 
""" + from reflex_base.components.dynamic import reset_dynamic_component_imports from reflex_base.utils.exceptions import ReflexRuntimeError + reset_dynamic_component_imports() + self._apply_decorated_pages() self._pages = {} @@ -1412,9 +1415,15 @@ def _submit_work( route, ) - # Compile the root stylesheet with base styles. + # Compile the root stylesheet with base styles. theme_roots lets + # the compiler ship only the Radix color scales that are actually + # referenced by Theme components in the tree. + theme_roots = [self.theme, *self._pages.values()] _submit_work( - compiler.compile_root_stylesheet, self.stylesheets, self.reset_style + compiler.compile_root_stylesheet, + self.stylesheets, + self.reset_style, + theme_roots, ) # Compile the theme. @@ -1438,6 +1447,7 @@ def _submit_work_without_advancing( )) ), unevaluated_pages=list(self._unevaluated_pages.values()), + theme_roots=theme_roots, ) # Wait for all compilation tasks to complete. @@ -1466,9 +1476,17 @@ def _submit_work_without_advancing( self.theme.appearance = None # pyright: ignore[reportAttributeAccessIssue] progress.advance(task) + # Star imports of large libraries (e.g. @radix-ui/themes) defeat + # Rolldown tree-shaking for window.__reflex; pass per-source dicts + # so tags from multiple pages union instead of clobbering. + window_library_imports = compiler.collect_window_library_imports([ + *(p._get_all_imports() for p in self._pages.values()), + app_root._get_all_imports(), + ]) + # Compile the app root. 
compile_results.append( - compiler.compile_app(app_root), + compiler.compile_app(app_root, window_library_imports), ) progress.advance(task) diff --git a/reflex/compiler/compiler.py b/reflex/compiler/compiler.py index feec49ee69a..7b76652c479 100644 --- a/reflex/compiler/compiler.py +++ b/reflex/compiler/compiler.py @@ -5,6 +5,7 @@ import sys from collections.abc import Callable, Iterable, Sequence from inspect import getmodule +from itertools import chain from pathlib import Path from typing import TYPE_CHECKING, Any @@ -24,7 +25,7 @@ from reflex_base.utils.exceptions import ReflexError from reflex_base.utils.format import to_title_case from reflex_base.utils.imports import ImportVar, ParsedImportDict -from reflex_base.vars.base import LiteralVar, Var +from reflex_base.vars.base import LiteralVar, Var, get_python_literal from reflex_components_core.base.fragment import Fragment from reflex.compiler import templates, utils @@ -65,37 +66,78 @@ def _compile_document_root(root: Component) -> str: ) -def _normalize_library_name(lib: str) -> str: - """Normalize the library name. +# Path-like imports resolve to Reflex-controlled internal modules; non-matching +# imports are external npm libraries where star imports defeat tree-shaking. +_INTERNAL_LIB_PREFIXES = ("$/", "/", ".") + + +def collect_window_library_imports( + import_sources: Iterable[dict[str, Any]], +) -> dict[str, set[str] | None]: + """Build the ``window.__reflex`` surface for runtime-eval'd code. + + Each bundled library gets either a set of named exports (for external libs, + collected from the app's actual static usage so Rolldown can tree-shake) or + ``None`` (for internal Reflex modules, which use a star import). 
+ + External library tags come from two sources: + (1) static page / app-root imports, and + (2) tags captured during compile-time serialization of dynamic Component + values (Component-typed state field defaults, computed Component vars + evaluated when generating the initial state) -- see + ``reflex_base.components.dynamic.dynamic_component_imports``. + + Takes an iterable of import dicts (one per page / app_root / memo group) + instead of a single merged dict because ``dict.update`` loses information + when multiple sources import from the same library. Args: - lib: The library name to normalize. + import_sources: One import dict per source (page, app_root, etc). Returns: - The normalized library name. + Mapping from library path to either the set of named exports to expose + (external libs) or None (internal libs, use star import). """ - if lib == "react": - return "React" - return lib.replace("$/", "").replace("@", "").replace("/", "_").replace("-", "_") - - -def _compile_app(app_root: Component) -> str: + from reflex_base.components.dynamic import ( + bundled_libraries, + dynamic_component_imports, + ) + from reflex_base.utils.format import format_library_name + + per_lib_tags: dict[str, set[str]] = {} + for source in chain(import_sources, (dynamic_component_imports,)): + for imported_lib, import_vars in source.items(): + key = format_library_name(imported_lib) + for iv in import_vars: + if iv.tag and not iv.is_default: + per_lib_tags.setdefault(key, set()).add(iv.tag) + + result: dict[str, set[str] | None] = {} + for lib in bundled_libraries: + if lib.startswith(_INTERNAL_LIB_PREFIXES): + result[lib] = None + continue + tags = per_lib_tags.get(lib) + if tags: + result[lib] = tags + return result + + +def _compile_app( + app_root: Component, + window_library_imports: dict[str, set[str] | None] | None = None, +) -> str: """Compile the app template component. Args: app_root: The app root to compile. 
+ window_library_imports: Per-library named-export surface to expose on + ``window.__reflex`` for dynamic components. Empty/None skips the + bootstrap entirely. Returns: The compiled app. """ - from reflex_base.components.dynamic import bundled_libraries - - window_libraries = [ - (_normalize_library_name(name), name) for name in bundled_libraries - ] - - window_libraries_deduped = list(dict.fromkeys(window_libraries)) - app_root_imports = app_root._get_all_imports() _apply_common_imports(app_root_imports) @@ -103,7 +145,7 @@ def _compile_app(app_root: Component) -> str: imports=utils.compile_imports(app_root_imports), custom_codes=app_root._get_all_custom_code(), hooks=app_root._get_all_hooks(), - window_libraries=window_libraries_deduped, + window_library_imports=window_library_imports or {}, render=app_root.render(), dynamic_imports=app_root._get_all_dynamic_imports(), ) @@ -175,20 +217,24 @@ def _compile_page(component: BaseComponent) -> str: def compile_root_stylesheet( - stylesheets: list[str], reset_style: bool = True + stylesheets: list[str], + reset_style: bool = True, + theme_roots: Sequence[BaseComponent | None] | None = None, ) -> tuple[str, str]: """Compile the root stylesheet. Args: stylesheets: The stylesheets to include in the root stylesheet. reset_style: Whether to include CSS reset for margin and padding. + theme_roots: Component roots to scan for Theme components so only the + used Radix color scales are shipped. Returns: The path and code of the compiled root stylesheet. """ output_path = utils.get_root_stylesheet_path() - code = _compile_root_stylesheet(stylesheets, reset_style) + code = _compile_root_stylesheet(stylesheets, reset_style, theme_roots) return output_path, code @@ -230,13 +276,137 @@ def _validate_stylesheet(stylesheet_full_path: Path, assets_app_path: Path) -> N RADIX_THEMES_STYLESHEET = "@radix-ui/themes/styles.css" +# Granular Radix Themes entry points. 
Importing these instead of the monolithic +# styles.css lets us drop the ~30 unused color scales (~120KB raw of tokens.css). +# Layout + reset live in tokens/components/utilities, so these three are always needed. +_RADIX_THEMES_TOKENS_BASE = "@radix-ui/themes/tokens/base.css" +_RADIX_THEMES_COMPONENTS = "@radix-ui/themes/components.css" +_RADIX_THEMES_UTILITIES = "@radix-ui/themes/utilities.css" + + +def _radix_color_stylesheet(color: str) -> str: + return f"@radix-ui/themes/tokens/colors/{color}.css" + + +# When gray_color is "auto" or unset, Radix pairs each accent with a specific gray. +# https://www.radix-ui.com/themes/docs/theme/color#natural-pairing +_RADIX_ACCENT_TO_AUTO_GRAY: dict[str, str] = { + "tomato": "mauve", + "red": "mauve", + "ruby": "mauve", + "crimson": "mauve", + "pink": "mauve", + "plum": "mauve", + "purple": "mauve", + "violet": "mauve", + "iris": "slate", + "indigo": "slate", + "blue": "slate", + "sky": "slate", + "cyan": "slate", + "teal": "sage", + "jade": "sage", + "mint": "sage", + "green": "sage", + "grass": "sage", + "orange": "sand", + "amber": "sand", + "yellow": "sand", + "lime": "sand", + "brown": "sand", + "bronze": "sand", + "gold": "sand", + "gray": "gray", +} + + +def _extract_literal_prop(component: Any, prop_name: str) -> str | None: + """Return the literal string for a Theme prop, or None if unresolvable.""" + literal = get_python_literal(getattr(component, prop_name, None)) + return literal if isinstance(literal, str) else None + + +def _walk_components(root: Any) -> Iterable[Any]: + """Yield root and every descendant via .children.""" + stack = [root] + while stack: + node = stack.pop() + yield node + children = getattr(node, "children", None) + if children: + stack.extend(children) + + +def _collect_radix_theme_colors( + roots: Iterable[Any], +) -> tuple[set[str], set[str], bool]: + """Walk component trees for Theme components and collect their colors. 
-def _compile_root_stylesheet(stylesheets: list[str], reset_style: bool = True) -> str: + Args: + roots: Component trees to walk for Theme components. + + Returns: + A tuple ``(accent_colors, gray_colors, has_dynamic)``. ``has_dynamic`` + is True if any Theme has a non-literal (state-driven) color, in which + case the caller should fall back to the monolithic stylesheet. + """ + accents: set[str] = set() + grays: set[str] = set() + has_dynamic = False + for root in roots: + if root is None: + continue + for node in _walk_components(root): + if getattr(node, "tag", None) != "Theme": + continue + accent = _extract_literal_prop(node, "accent_color") + gray = _extract_literal_prop(node, "gray_color") + # A Theme instance with a prop attribute but non-literal value is dynamic. + if accent is None and getattr(node, "accent_color", None) is not None: + has_dynamic = True + if gray is None and getattr(node, "gray_color", None) is not None: + has_dynamic = True + if accent: + accents.add(accent) + if gray and gray != "auto": + grays.add(gray) + # When gray_color is unset or "auto", Radix pairs the accent with a + # natural gray scale. Ship that scale too. + if accent and (gray is None or gray == "auto"): + grays.add(_RADIX_ACCENT_TO_AUTO_GRAY.get(accent, "gray")) + return accents, grays, has_dynamic + + +def get_radix_themes_stylesheets(roots: Iterable[Any] | None = None) -> list[str]: + """Return the list of Radix Themes stylesheets to import. + + If any Theme component uses a state-driven color, falls back to the + monolithic styles.css so runtime color changes keep working. 
+ """ + if roots is None: + return [RADIX_THEMES_STYLESHEET] + accents, grays, has_dynamic = _collect_radix_theme_colors(roots) + if has_dynamic or not accents: + return [RADIX_THEMES_STYLESHEET] + sheets = [_RADIX_THEMES_TOKENS_BASE] + sheets.extend(_radix_color_stylesheet(c) for c in sorted(grays)) + sheets.extend(_radix_color_stylesheet(c) for c in sorted(accents)) + sheets.extend([_RADIX_THEMES_COMPONENTS, _RADIX_THEMES_UTILITIES]) + return sheets + + +def _compile_root_stylesheet( + stylesheets: list[str], + reset_style: bool = True, + theme_roots: Sequence[BaseComponent | None] | None = None, +) -> str: """Compile the root stylesheet. Args: stylesheets: The stylesheets to include in the root stylesheet. reset_style: Whether to include CSS reset for margin and padding. + theme_roots: Component roots to scan for Theme components so only the + used Radix color scales are shipped. Returns: The compiled root stylesheet. @@ -253,7 +423,7 @@ def _compile_root_stylesheet(stylesheets: list[str], reset_style: bool = True) - sheets.append(f"./{ResetStylesheet.FILENAME}") sheets.extend( - [RADIX_THEMES_STYLESHEET] + get_radix_themes_stylesheets(theme_roots) + [ sheet for plugin in get_config().plugins @@ -521,11 +691,17 @@ def compile_document_root( return output_path, code -def compile_app(app_root: Component) -> tuple[str, str]: +def compile_app( + app_root: Component, + window_library_imports: dict[str, set[str] | None] | None = None, +) -> tuple[str, str]: """Compile the app root. Args: app_root: The app root component to compile. + window_library_imports: Per-library named-export surface for + ``window.__reflex`` (see ``collect_window_library_imports``). Pass + ``None`` to skip emitting ``window.__reflex`` entirely. Returns: The path and code of the compiled app wrapper. @@ -536,7 +712,7 @@ def compile_app(app_root: Component) -> tuple[str, str]: ) # Compile the document root. 
- code = _compile_app(app_root) + code = _compile_app(app_root, window_library_imports) return output_path, code diff --git a/reflex/testing.py b/reflex/testing.py index 9a3ff023049..ceec435f330 100644 --- a/reflex/testing.py +++ b/reflex/testing.py @@ -13,7 +13,6 @@ import re import signal import socket -import socketserver import subprocess import sys import textwrap @@ -22,7 +21,6 @@ import types from collections.abc import Callable, Coroutine, Sequence from copy import deepcopy -from http.server import SimpleHTTPRequestHandler from importlib.util import find_spec from pathlib import Path from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeVar @@ -33,6 +31,7 @@ from reflex_base.environment import environment from reflex_base.registry import RegistrationContext from reflex_base.utils.types import ASGIApp +from starlette.applications import Starlette from typing_extensions import Self import reflex @@ -46,6 +45,7 @@ from reflex.state import reload_state_module from reflex.utils import console, js_runtimes from reflex.utils.export import export +from reflex.utils.precompressed_staticfiles import PrecompressedStaticFiles from reflex.utils.token_manager import TokenManager try: @@ -801,94 +801,16 @@ def expect( ) -class SimpleHTTPRequestHandlerCustomErrors(SimpleHTTPRequestHandler): - """SimpleHTTPRequestHandler with custom error page handling.""" - - def __init__(self, *args, error_page_map: dict[int, Path], **kwargs): - """Initialize the handler. - - Args: - error_page_map: map of error code to error page path - *args: passed through to superclass - **kwargs: passed through to superclass - """ - self.error_page_map = error_page_map - super().__init__(*args, **kwargs) - - def send_error( - self, code: int, message: str | None = None, explain: str | None = None - ) -> None: - """Send the error page for the given error code. - - If the code matches a custom error page, then message and explain are - ignored. 
- - Args: - code: the error code - message: the error message - explain: the error explanation - """ - error_page = self.error_page_map.get(code) - if error_page: - self.send_response(code, message) - self.send_header("Connection", "close") - body = error_page.read_bytes() - self.send_header("Content-Type", self.error_content_type) - self.send_header("Content-Length", str(len(body))) - self.end_headers() - self.wfile.write(body) - else: - super().send_error(code, message, explain) - - -class Subdir404TCPServer(socketserver.TCPServer): - """TCPServer for SimpleHTTPRequestHandlerCustomErrors that serves from a subdir.""" - - def __init__( - self, - *args, - root: Path, - error_page_map: dict[int, Path] | None, - **kwargs, - ): - """Initialize the server. - - Args: - root: the root directory to serve from - error_page_map: map of error code to error page path - *args: passed through to superclass - **kwargs: passed through to superclass - """ - self.root = root - self.error_page_map = error_page_map or {} - super().__init__(*args, **kwargs) - - def finish_request(self, request: socket.socket, client_address: tuple[str, int]): - """Finish one request by instantiating RequestHandlerClass. - - Args: - request: the requesting socket - client_address: (host, port) referring to the client's address. - """ - self.RequestHandlerClass( - request, - client_address, - self, - directory=str(self.root), # pyright: ignore [reportCallIssue] - error_page_map=self.error_page_map, # pyright: ignore [reportCallIssue] - ) - - class AppHarnessProd(AppHarness): """AppHarnessProd executes a reflex app in-process for testing. In prod mode, instead of running `react-router dev` the app is exported as static - files and served via the builtin python http.server with custom 404 redirect - handling. Additionally, the backend runs in multi-worker mode. + files and served via Starlette StaticFiles in a dedicated Uvicorn server. + Additionally, the backend runs in multi-worker mode. 
""" frontend_thread: threading.Thread | None = None - frontend_server: Subdir404TCPServer | None = None + frontend_server: uvicorn.Server | None = None def _run_frontend(self): web_root = ( @@ -896,20 +818,26 @@ def _run_frontend(self): / reflex.utils.prerequisites.get_web_dir() / reflex.constants.Dirs.STATIC ) - config = reflex.config.get_config() - with Subdir404TCPServer( - ("", 0), - SimpleHTTPRequestHandlerCustomErrors, - root=web_root, - error_page_map={ - 404: web_root / config.prepend_frontend_path("/404.html").lstrip("/"), - }, - ) as self.frontend_server: - frontend_path = config.frontend_path.strip("/") - self.frontend_url = "http://localhost:{1}".format( - *self.frontend_server.socket.getsockname() - ) + (f"/{frontend_path}/" if frontend_path else "/") - self.frontend_server.serve_forever() + config = get_config() + frontend_app = Starlette() + frontend_app.mount( + config.prepend_frontend_path("/"), + PrecompressedStaticFiles( + directory=web_root / config.frontend_path.strip("/"), + html=True, + encodings=config.frontend_compression_formats, + image_formats=config.frontend_image_formats, + ), + name="frontend", + ) + self.frontend_server = uvicorn.Server( + uvicorn.Config( + app=frontend_app, + host="127.0.0.1", + port=0, + ) + ) + self.frontend_server.run() def _start_frontend(self): # Set up the frontend. 
@@ -942,10 +870,24 @@ def _start_frontend(self): self.frontend_thread.start() def _wait_frontend(self): - self._poll_for(lambda: self.frontend_server is not None) - if self.frontend_server is None or not self.frontend_server.socket.fileno(): + self._poll_for( + lambda: ( + self.frontend_server + and getattr(self.frontend_server, "servers", False) + and getattr(self.frontend_server.servers[0], "sockets", False) + ) + ) + if self.frontend_server is None or not self.frontend_server.servers[0].sockets: msg = "Frontend did not start" raise RuntimeError(msg) + frontend_socket = self.frontend_server.servers[0].sockets[0] + if not frontend_socket.fileno(): + msg = "Frontend did not start" + raise RuntimeError(msg) + self.frontend_url = "http://{}:{}".format( + *frontend_socket.getsockname() + ) + get_config().prepend_frontend_path("/") + get_config().deploy_url = self.frontend_url def _start_backend(self): if self.app_asgi is None: @@ -982,9 +924,9 @@ def _poll_for_servers(self, timeout: TimeoutType = None) -> socket.socket: environment.REFLEX_SKIP_COMPILE.set(None) def stop(self): - """Stop the frontend python webserver.""" - super().stop() + """Stop the frontend and backend servers.""" if self.frontend_server is not None: - self.frontend_server.shutdown() + self.frontend_server.should_exit = True + super().stop() if self.frontend_thread is not None: self.frontend_thread.join() diff --git a/reflex/utils/build.py b/reflex/utils/build.py index c25bb5660c1..0553a61387b 100644 --- a/reflex/utils/build.py +++ b/reflex/utils/build.py @@ -2,6 +2,7 @@ from __future__ import annotations +import json import os import zipfile from pathlib import Path, PosixPath @@ -12,6 +13,7 @@ from reflex.utils import console, js_runtimes, path_ops, prerequisites, processes from reflex.utils.exec import is_in_app_harness +from reflex.utils.precompressed_staticfiles import _SUPPORTED_ENCODINGS def set_env_json(): @@ -164,13 +166,16 @@ def zip_app( ) -def 
_duplicate_index_html_to_parent_directory(directory: Path): +def _duplicate_index_html_to_parent_directory( + directory: Path, suffixes: tuple[str, ...] +): """Duplicate index.html in the child directories to the given directory. This makes accessing /route and /route/ work in production. Args: directory: The directory to duplicate index.html to. + suffixes: Precompressed sidecar suffixes to copy alongside each file. """ for child in directory.iterdir(): if child.is_dir(): @@ -181,10 +186,66 @@ def _duplicate_index_html_to_parent_directory(directory: Path): if not target.exists(): console.debug(f"Copying {index_html} to {target}") path_ops.cp(index_html, target) + _copy_precompressed_sidecars(index_html, target, suffixes) else: console.debug(f"Skipping {index_html}, already exists at {target}") # Recursively call this function for the child directory. - _duplicate_index_html_to_parent_directory(child) + _duplicate_index_html_to_parent_directory(child, suffixes) + + +def _copy_precompressed_sidecars(source: Path, target: Path, suffixes: tuple[str, ...]): + """Copy precompressed sidecars for a file if they exist. + + Args: + source: The original file path. + target: The copied file path. + suffixes: The file suffixes to look for (e.g. ``(".gz",)``). + """ + for suffix in suffixes: + source_sidecar = source.with_name(source.name + suffix) + if not source_sidecar.exists(): + continue + + target_sidecar = target.with_name(target.name + suffix) + console.debug(f"Copying {source_sidecar} to {target_sidecar}") + path_ops.cp(source_sidecar, target_sidecar) + + +def _compress_static_output(directory: Path, formats: tuple[str, ...]) -> None: + """Run the shared frontend compressor against the final static output tree. + + Args: + directory: The static output directory. + formats: The configured frontend compression formats. + + Raises: + SystemExit: If no JavaScript runtime is available or compression fails. 
+ """ + if not formats: + return + + web_dir = prerequisites.get_web_dir().resolve() + runtime = path_ops.get_node_path() or path_ops.get_bun_path() + if runtime is None: + console.error("Node.js or Bun is required to compress the exported frontend.") + raise SystemExit(1) + + result = processes.new_process( + [ + runtime, + web_dir / "compress-static.js", + directory.resolve(), + json.dumps(formats), + ], + cwd=web_dir, + shell=constants.IS_WINDOWS, + run=True, + ) + if result.returncode != 0: + console.error( + "Failed to compress the exported frontend. Please run with --loglevel debug for more information." + ) + raise SystemExit(1) def build(): @@ -226,19 +287,32 @@ def build(): "Failed to build the frontend. Please run with --loglevel debug for more information.", ) raise SystemExit(1) - _duplicate_index_html_to_parent_directory(wdir / constants.Dirs.STATIC) + + config = get_config() + sidecar_suffixes = tuple( + _SUPPORTED_ENCODINGS[fmt].suffix for fmt in config.frontend_compression_formats + ) + + _duplicate_index_html_to_parent_directory( + wdir / constants.Dirs.STATIC, sidecar_suffixes + ) spa_fallback = wdir / constants.Dirs.STATIC / constants.ReactRouter.SPA_FALLBACK if not spa_fallback.exists(): spa_fallback = wdir / constants.Dirs.STATIC / "index.html" if spa_fallback.exists(): + target_404 = wdir / constants.Dirs.STATIC / "404.html" path_ops.cp( spa_fallback, - wdir / constants.Dirs.STATIC / "404.html", + target_404, ) + _copy_precompressed_sidecars(spa_fallback, target_404, sidecar_suffixes) - config = get_config() + _compress_static_output( + wdir / constants.Dirs.STATIC, + tuple(config.frontend_compression_formats), + ) if frontend_path := config.frontend_path.strip("/"): # Create a subdirectory that matches the configured frontend_path. 
diff --git a/reflex/utils/exec.py b/reflex/utils/exec.py index f2cbf387a10..6d8da54b0ee 100644 --- a/reflex/utils/exec.py +++ b/reflex/utils/exec.py @@ -24,6 +24,7 @@ from reflex.utils import path_ops from reflex.utils.misc import get_module_path +from reflex.utils.precompressed_staticfiles import PrecompressedStaticFiles from reflex.utils.prerequisites import get_web_dir # For uvicorn windows bug fix (#2335) @@ -273,19 +274,17 @@ def get_frontend_mount(): A Mount serving the compiled frontend static files. """ from starlette.routing import Mount - from starlette.staticfiles import StaticFiles - - from reflex.utils import prerequisites config = get_config() + frontend_path = config.frontend_path.strip("/") return Mount( config.prepend_frontend_path("/"), - app=StaticFiles( - directory=prerequisites.get_web_dir() - / constants.Dirs.STATIC - / config.frontend_path.strip("/"), + app=PrecompressedStaticFiles( + directory=get_web_dir() / constants.Dirs.STATIC / frontend_path, html=True, + encodings=config.frontend_compression_formats, + image_formats=config.frontend_image_formats, ), name="frontend", ) diff --git a/reflex/utils/frontend_skeleton.py b/reflex/utils/frontend_skeleton.py index dc60f5ae85b..85540545c48 100644 --- a/reflex/utils/frontend_skeleton.py +++ b/reflex/utils/frontend_skeleton.py @@ -247,6 +247,8 @@ def _compile_vite_config(config: Config): experimental_hmr=environment.VITE_EXPERIMENTAL_HMR.get(), sourcemap=environment.VITE_SOURCEMAP.get(), allowed_hosts=config.vite_allowed_hosts, + compression_formats=config.frontend_compression_formats, + image_formats=config.frontend_image_formats, ) diff --git a/reflex/utils/precompressed_staticfiles.py b/reflex/utils/precompressed_staticfiles.py new file mode 100644 index 00000000000..bfa3e3218f9 --- /dev/null +++ b/reflex/utils/precompressed_staticfiles.py @@ -0,0 +1,377 @@ +"""Serve precompressed static assets when the client supports them.""" + +from __future__ import annotations + +import errno +import os 
+import stat +from collections.abc import Mapping, Sequence +from dataclasses import dataclass +from mimetypes import guess_type +from pathlib import Path +from typing import TypeVar + +from anyio import to_thread +from starlette.datastructures import URL, Headers +from starlette.exceptions import HTTPException +from starlette.responses import FileResponse, RedirectResponse, Response +from starlette.staticfiles import NotModifiedResponse, StaticFiles +from starlette.types import Scope + + +@dataclass(frozen=True, slots=True) +class _EncodingFormat: + """Mapping between a configured format and its HTTP/static-file details.""" + + name: str + content_encoding: str + suffix: str + + +_SUPPORTED_ENCODINGS = { + "gzip": _EncodingFormat( + name="gzip", + content_encoding="gzip", + suffix=".gz", + ), + "brotli": _EncodingFormat( + name="brotli", + content_encoding="br", + suffix=".br", + ), + "zstd": _EncodingFormat( + name="zstd", + content_encoding="zstd", + suffix=".zst", + ), +} + + +@dataclass(frozen=True, slots=True) +class _ImageFormat: + """Mapping between an image format and its HTTP/static-file details.""" + + name: str + media_type: str + suffix: str + + +_SUPPORTED_IMAGE_FORMATS = { + "webp": _ImageFormat(name="webp", media_type="image/webp", suffix=".webp"), + "avif": _ImageFormat(name="avif", media_type="image/avif", suffix=".avif"), +} + +# Extensions of image files that can have optimized format variants. +_OPTIMIZABLE_IMAGE_EXTENSIONS = frozenset({ + ".png", + ".jpg", + ".jpeg", + ".gif", + ".bmp", + ".tif", + ".tiff", +}) + + +_T = TypeVar("_T") + + +def _resolve_formats( + names: Sequence[str], + registry: Mapping[str, _T], +) -> tuple[_T, ...]: + """Look up format objects by name. Config already validates inputs. + + Args: + names: Validated format names from config. + registry: Mapping of format name to format object. + + Returns: + Format objects in the configured order. 
+ """ + return tuple(registry[n] for n in names if n in registry) + + +def _parse_quality_header(header_value: str | None) -> dict[str, float]: + """Parse a ``token;q=value`` HTTP header (Accept, Accept-Encoding, etc.). + + Args: + header_value: The raw header value. + + Returns: + A mapping of tokens to their quality values. + """ + if not header_value: + return {} + + parsed: dict[str, float] = {} + for entry in header_value.split(","): + token, *params = entry.split(";") + token = token.strip().lower() + if not token: + continue + + quality = 1.0 + for param in params: + key, _, value = param.strip().partition("=") + if key.lower() != "q" or not value: + continue + try: + quality = float(value) + except ValueError: + quality = 0.0 + break + + parsed[token] = max(parsed.get(token, 0.0), quality) + return parsed + + +class PrecompressedStaticFiles(StaticFiles): + """StaticFiles that prefers matching precompressed sidecar files.""" + + def __init__( + self, + *args, + encodings: Sequence[str] = (), + image_formats: Sequence[str] = (), + **kwargs, + ): + """Initialize the static file server. + + Args: + *args: Passed through to ``StaticFiles``. + encodings: Ordered list of supported precompressed formats. + image_formats: Ordered list of optimized image formats to negotiate. + **kwargs: Passed through to ``StaticFiles``. + """ + super().__init__(*args, **kwargs) + self._encodings = _resolve_formats(encodings, _SUPPORTED_ENCODINGS) + self._encoding_suffixes = tuple(fmt.suffix for fmt in self._encodings) + self._image_formats = _resolve_formats(image_formats, _SUPPORTED_IMAGE_FORMATS) + + def _find_precompressed_variant_sync( + self, + path: str, + accepted_encodings: dict[str, float], + ) -> tuple[_EncodingFormat, str, os.stat_result] | None: + """Select the best matching precompressed sidecar for a request path. + + This performs blocking filesystem lookups and must be called via + ``to_thread.run_sync`` from async contexts. 
+ + Args: + path: The requested relative file path. + accepted_encodings: Parsed Accept-Encoding quality values. + + Returns: + The selected encoding format, file path, and stat result, or ``None``. + """ + best_match = None + best_quality = 0.0 + + for encoding in self._encodings: + quality = accepted_encodings.get( + encoding.content_encoding, accepted_encodings.get("*", 0.0) + ) + if quality <= 0: + continue + + full_path, stat_result = self.lookup_path(path + encoding.suffix) + if stat_result is None or not stat.S_ISREG(stat_result.st_mode): + continue + + if quality > best_quality: + best_match = (encoding, full_path, stat_result) + best_quality = quality + if best_quality >= 1.0: + break + + return best_match + + def _find_image_format_variant_sync( + self, + path: str, + accepted_types: dict[str, float], + ) -> tuple[_ImageFormat, str, os.stat_result] | None: + """Select the best matching optimized image variant for a request path. + + This performs blocking filesystem lookups and must be called via + ``to_thread.run_sync`` from async contexts. + + Args: + path: The requested relative file path. + accepted_types: Parsed Accept header quality values. + + Returns: + The selected image format, file path, and stat result, or ``None``. + """ + best_match = None + best_quality = 0.0 + + # Strip the original extension to build sidecar paths. 
+ stem, _dot, _ext = path.rpartition(".") + + for fmt in self._image_formats: + quality = accepted_types.get(fmt.media_type, accepted_types.get("*/*", 0.0)) + if quality <= 0: + continue + + sidecar_path = f"{stem}{fmt.suffix}" + full_path, stat_result = self.lookup_path(sidecar_path) + if stat_result is None or not stat.S_ISREG(stat_result.st_mode): + continue + + if quality > best_quality: + best_match = (fmt, full_path, stat_result) + best_quality = quality + if best_quality >= 1.0: + break + + return best_match + + async def _build_file_response( + self, + *, + path: str, + full_path: str, + stat_result: os.stat_result, + scope: Scope, + status_code: int = 200, + ) -> Response: + """Build a ``FileResponse`` with optional precompressed sidecar support. + + Args: + path: The requested relative file path. + full_path: The resolved on-disk path to the uncompressed file. + stat_result: The stat result for the uncompressed file. + scope: The ASGI request scope. + status_code: The response status code to use. + + Returns: + A file response that serves the best matching asset variant. + """ + request_headers = Headers(scope=scope) + response_headers = {} + response_path = full_path + response_stat = stat_result + media_type = None + vary_parts: list[str] = [] + + # Image format negotiation via Accept header. + if self._image_formats: + ext = Path(path).suffix + if ext.lower() in _OPTIMIZABLE_IMAGE_EXTENSIONS: + accepted_types = _parse_quality_header(request_headers.get("accept")) + if accepted_types: + matched_image = await to_thread.run_sync( + lambda: self._find_image_format_variant_sync( + path, accepted_types + ) + ) + if matched_image: + fmt, response_path, response_stat = matched_image + media_type = fmt.media_type + vary_parts.append("Accept") + + # Encoding negotiation via Accept-Encoding header. 
+ # Skip if image format negotiation already changed the response — the + # precompressed sidecars are keyed to the original path, and modern + # image formats (WebP, AVIF) are already compressed. + if ( + self._encodings + and media_type is None + and not path.endswith(self._encoding_suffixes) + ): + accepted_encodings = _parse_quality_header( + request_headers.get("accept-encoding") + ) + if accepted_encodings: + matched_variant = await to_thread.run_sync( + lambda: self._find_precompressed_variant_sync( + path, accepted_encodings + ) + ) + if matched_variant: + encoding, response_path, response_stat = matched_variant + response_headers["Content-Encoding"] = encoding.content_encoding + media_type = guess_type(path)[0] or "text/plain" + + if self._encodings: + vary_parts.append("Accept-Encoding") + + if vary_parts: + response_headers["Vary"] = ", ".join(vary_parts) + + response = FileResponse( + response_path, + status_code=status_code, + headers=response_headers or None, + media_type=media_type, + stat_result=response_stat, + ) + if self.is_not_modified(response.headers, request_headers): + return NotModifiedResponse(response.headers) + return response + + async def get_response(self, path: str, scope: Scope) -> Response: + """Return the best static response for ``path`` and ``scope``. + + Args: + path: The requested relative file path. + scope: The ASGI request scope. + + Returns: + The resolved static response for the request. 
+ """ + if scope["method"] not in ("GET", "HEAD"): + raise HTTPException(status_code=405) + + try: + full_path, stat_result = await to_thread.run_sync(self.lookup_path, path) + except PermissionError: + raise HTTPException(status_code=401) from None + except OSError as exc: + if exc.errno == errno.ENAMETOOLONG: + raise HTTPException(status_code=404) from None + raise + + if stat_result and stat.S_ISREG(stat_result.st_mode): + return await self._build_file_response( + path=path, + full_path=full_path, + stat_result=stat_result, + scope=scope, + ) + + if stat_result and stat.S_ISDIR(stat_result.st_mode) and self.html: + index_path = str(Path(path) / "index.html") + full_index_path, index_stat_result = await to_thread.run_sync( + self.lookup_path, index_path + ) + if index_stat_result is not None and stat.S_ISREG( + index_stat_result.st_mode + ): + if not scope["path"].endswith("/"): + url = URL(scope=scope) + return RedirectResponse(url=url.replace(path=url.path + "/")) + return await self._build_file_response( + path=index_path, + full_path=full_index_path, + stat_result=index_stat_result, + scope=scope, + ) + + if self.html: + full_404_path, stat_404_result = await to_thread.run_sync( + self.lookup_path, "404.html" + ) + if stat_404_result and stat.S_ISREG(stat_404_result.st_mode): + return await self._build_file_response( + path="404.html", + full_path=full_404_path, + stat_result=stat_404_result, + scope=scope, + status_code=404, + ) + + raise HTTPException(status_code=404) diff --git a/scripts/run_lighthouse.py b/scripts/run_lighthouse.py new file mode 100644 index 00000000000..a61ce3bda42 --- /dev/null +++ b/scripts/run_lighthouse.py @@ -0,0 +1,33 @@ +"""Run the local Lighthouse benchmark with a fresh app build.""" + +from __future__ import annotations + +import shutil +from pathlib import Path + +from tests.integration.lighthouse_utils import ( + LIGHTHOUSE_LANDING_APP_NAME, + run_landing_prod_lighthouse_benchmark, +) + + +def main() -> int: + """Run the 
Lighthouse benchmark and print a compact summary. + + Returns: + The process exit code. + """ + report_dir = Path(".states") / "lighthouse" + app_root = Path(".states") / LIGHTHOUSE_LANDING_APP_NAME + shutil.rmtree(app_root, ignore_errors=True) + + result = run_landing_prod_lighthouse_benchmark( + app_root=app_root, + report_path=report_dir / "landing-prod-lighthouse.json", + ) + print(result.summary) # noqa: T201 + return 1 if result.failures else 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tests/integration/lighthouse_utils.py b/tests/integration/lighthouse_utils.py new file mode 100644 index 00000000000..2fa895da641 --- /dev/null +++ b/tests/integration/lighthouse_utils.py @@ -0,0 +1,1077 @@ +"""Shared utilities for Lighthouse benchmarking.""" + +from __future__ import annotations + +import json +import operator +import os +import re +import shlex +import shutil +import subprocess +import time +import urllib.request +from dataclasses import dataclass +from functools import cache +from pathlib import Path +from typing import Any +from urllib.parse import urlsplit, urlunsplit + +import pytest + +from reflex.testing import chdir +from reflex.utils.templates import initialize_default_app + +LIGHTHOUSE_RUN_ENV_VAR = "REFLEX_RUN_LIGHTHOUSE" +LIGHTHOUSE_COMMAND_ENV_VAR = "REFLEX_LIGHTHOUSE_COMMAND" +LIGHTHOUSE_CHROME_PATH_ENV_VAR = "REFLEX_LIGHTHOUSE_CHROME_PATH" +LIGHTHOUSE_CLI_PACKAGE = "lighthouse@13.1.0" +LIGHTHOUSE_COMMAND_PREP_TIMEOUT_SECONDS = 300 +LIGHTHOUSE_RUN_TIMEOUT_SECONDS = 300 +TRUTHY_ENV_VALUES = {"1", "true", "yes", "on"} +LIGHTHOUSE_CATEGORY_THRESHOLDS = { + "performance": 0.9, + "accessibility": 0.9, + "best-practices": 0.9, + "seo": 0.9, +} +LIGHTHOUSE_CATEGORIES = tuple(LIGHTHOUSE_CATEGORY_THRESHOLDS) +LIGHTHOUSE_LANDING_APP_NAME = "lighthouse_landing" + +LANDING_PAGE_SOURCE = '''\ +"""A single-page landing page for Lighthouse benchmarking.""" + +import reflex as rx + + +class State(rx.State): + """The app state.""" + 
+ +def navbar() -> rx.Component: + return rx.el.nav( + rx.container( + rx.hstack( + rx.hstack( + rx.icon("hexagon", size=28, color="var(--accent-9)"), + rx.heading("Acme", size="5", weight="bold"), + align="center", + spacing="2", + ), + rx.hstack( + rx.link("Features", href="#features", underline="none", size="3"), + rx.link("How It Works", href="#how-it-works", underline="none", size="3"), + rx.link("Pricing", href="#pricing", underline="none", size="3"), + rx.link("Testimonials", href="#testimonials", underline="none", size="3"), + spacing="5", + display={"base": "none", "md": "flex"}, + ), + rx.button("Sign Up", size="2", high_contrast=True, radius="full"), + justify="between", + align="center", + width="100%", + ), + size="4", + ), + style={ + "position": "sticky", + "top": "0", + "z_index": "50", + "backdrop_filter": "blur(12px)", + "border_bottom": "1px solid var(--gray-a4)", + "padding_top": "12px", + "padding_bottom": "12px", + }, + ) + + +def hero() -> rx.Component: + return rx.section( + rx.container( + rx.vstack( + rx.badge("Now in Public Beta", variant="surface", size="2", radius="full"), + rx.heading( + "Ship products 10x faster ", + rx.text.span("with pure Python", color="var(--accent-9)"), + size="9", + weight="bold", + align="center", + line_height="1.1", + ), + rx.text( + "Stop wrestling with JavaScript. Build beautiful, performant " + "full-stack web apps using nothing but Python. 
" + "From prototype to production in record time.", + size="5", + align="center", + color="var(--gray-11)", + max_width="640px", + ), + rx.hstack( + rx.button( + rx.icon("arrow-right", size=16), + "Get Started Free", + size="4", + high_contrast=True, + radius="full", + ), + rx.button( + rx.icon("play", size=16), + "Watch Demo", + size="4", + variant="outline", + radius="full", + ), + spacing="3", + ), + rx.hstack( + rx.hstack( + rx.avatar(fallback="A", size="2", radius="full"), + rx.avatar(fallback="B", size="2", radius="full", style={"margin_left": "-8px"}), + rx.avatar(fallback="C", size="2", radius="full", style={"margin_left": "-8px"}), + rx.avatar(fallback="D", size="2", radius="full", style={"margin_left": "-8px"}), + spacing="0", + ), + rx.text( + "Trusted by 50,000+ developers worldwide", + size="2", + color="var(--gray-11)", + ), + align="center", + spacing="3", + pt="2", + ), + spacing="5", + align="center", + py="9", + ), + size="4", + ), + ) + + +def stat_card(value: str, label: str) -> rx.Component: + return rx.vstack( + rx.heading(value, size="8", weight="bold", color="var(--accent-9)"), + rx.text(label, size="3", color="var(--gray-11)"), + align="center", + spacing="1", + ) + + +def stats_bar() -> rx.Component: + return rx.section( + rx.container( + rx.grid( + stat_card("50K+", "Developers"), + stat_card("10M+", "Apps Built"), + stat_card("99.9%", "Uptime"), + stat_card("150+", "Components"), + columns="4", + spacing="6", + width="100%", + ), + size="4", + ), + style={ + "background": "var(--accent-2)", + "border_top": "1px solid var(--gray-a4)", + "border_bottom": "1px solid var(--gray-a4)", + }, + ) + + +def feature_card(icon_name: str, title: str, description: str) -> rx.Component: + return rx.card( + rx.vstack( + rx.flex( + rx.icon(icon_name, size=24, color="var(--accent-9)"), + align="center", + justify="center", + style={ + "width": "48px", + "height": "48px", + "border_radius": "12px", + "background": "var(--accent-3)", + }, + ), + 
rx.heading(title, size="4", weight="bold"), + rx.text(description, size="3", color="var(--gray-11)", line_height="1.6"), + spacing="3", + ), + size="3", + ) + + +def features() -> rx.Component: + return rx.section( + rx.container( + rx.vstack( + rx.badge("Features", variant="surface", size="2", radius="full"), + rx.heading("Everything you need to build", size="8", weight="bold", align="center"), + rx.text( + "A complete toolkit for modern web development, " + "designed for developers who value productivity.", + size="4", + color="var(--gray-11)", + align="center", + max_width="540px", + ), + rx.grid( + feature_card( + "code", + "Pure Python", + "Write your frontend and backend in Python. " + "No JavaScript, no HTML templates, no CSS files to manage.", + ), + feature_card( + "zap", + "Lightning Fast Refresh", + "See your changes reflected instantly. Hot reload keeps " + "your development loop tight and productive.", + ), + feature_card( + "layers", + "60+ Built-in Components", + "From data tables to charts, forms to navigation. " + "Production-ready components out of the box.", + ), + feature_card( + "shield-check", + "Type Safe", + "Full type safety across your entire stack. " + "Catch bugs at development time, not in production.", + ), + feature_card( + "database", + "Built-in State Management", + "Reactive state that syncs between frontend and backend " + "automatically. No boilerplate, no Redux.", + ), + feature_card( + "rocket", + "One-Command Deploy", + "Deploy to production with a single command. 
" + "Built-in hosting or bring your own infrastructure.", + ), + columns={"base": "1", "sm": "2", "lg": "3"}, + spacing="5", + width="100%", + ), + spacing="5", + align="center", + py="6", + ), + size="4", + ), + id="features", + ) + + +def step_card(number: str, title: str, description: str) -> rx.Component: + return rx.vstack( + rx.flex( + rx.text(number, size="5", weight="bold", color="white"), + align="center", + justify="center", + style={ + "width": "48px", + "height": "48px", + "border_radius": "50%", + "background": "var(--accent-9)", + "flex_shrink": "0", + }, + ), + rx.heading(title, size="5", weight="bold"), + rx.text(description, size="3", color="var(--gray-11)", line_height="1.6"), + spacing="3", + align="center", + flex="1", + ) + + +def how_it_works() -> rx.Component: + return rx.section( + rx.container( + rx.vstack( + rx.badge("How It Works", variant="surface", size="2", radius="full"), + rx.heading("Up and running in minutes", size="8", weight="bold", align="center"), + rx.text( + "Three simple steps to go from idea to deployed application.", + size="4", + color="var(--gray-11)", + align="center", + ), + rx.grid( + step_card( + "1", + "Install & Initialize", + "Install the framework with pip and scaffold a new project " + "with a single command. Choose from starter templates.", + ), + step_card( + "2", + "Build Your App", + "Write components in pure Python. Use reactive state to " + "handle user interactions. Style with built-in themes.", + ), + step_card( + "3", + "Deploy", + "Push to production with one command. 
Automatic SSL, " + "CDN, and scaling handled for you.", + ), + columns={"base": "1", "md": "3"}, + spacing="6", + width="100%", + ), + spacing="5", + align="center", + py="6", + ), + size="4", + ), + id="how-it-works", + style={"background": "var(--accent-2)"}, + ) + + +def pricing_card( + name: str, price: str, period: str, description: str, + features: list, highlighted: bool = False, +) -> rx.Component: + return rx.card( + rx.vstack( + rx.heading(name, size="5", weight="bold"), + rx.hstack( + rx.heading(price, size="8", weight="bold"), + rx.text(period, size="3", color="var(--gray-11)", style={"align_self": "flex-end", "padding_bottom": "4px"}), + align="end", + spacing="1", + ), + rx.text(description, size="2", color="var(--gray-11)"), + rx.separator(size="4"), + rx.vstack( + *[ + rx.hstack( + rx.icon("check", size=16, color="var(--accent-9)"), + rx.text(f, size="2"), + spacing="2", + align="center", + ) + for f in features + ], + spacing="2", + width="100%", + ), + rx.button( + "Get Started", + size="3", + width="100%", + radius="full", + variant="solid" if highlighted else "outline", + high_contrast=highlighted, + ), + spacing="4", + p="2", + ), + size="3", + style={"border": "2px solid var(--accent-9)"} if highlighted else {}, + ) + + +def pricing() -> rx.Component: + return rx.section( + rx.container( + rx.vstack( + rx.badge("Pricing", variant="surface", size="2", radius="full"), + rx.heading("Simple, transparent pricing", size="8", weight="bold", align="center"), + rx.text( + "No hidden fees. 
Start free and scale as you grow.", + size="4", + color="var(--gray-11)", + align="center", + ), + rx.grid( + pricing_card( + "Hobby", + "$0", + "/month", + "Perfect for side projects and learning.", + ["1 project", "Community support", "Basic analytics", "Custom domain"], + ), + pricing_card( + "Pro", + "$29", + "/month", + "For professionals shipping real products.", + ["Unlimited projects", "Priority support", "Advanced analytics", "Team collaboration", "Custom branding"], + highlighted=True, + ), + pricing_card( + "Enterprise", + "$99", + "/month", + "For teams that need full control.", + ["Everything in Pro", "SSO & SAML", "Dedicated infrastructure", "SLA guarantee", "24/7 phone support"], + ), + columns={"base": "1", "md": "3"}, + spacing="5", + width="100%", + ), + spacing="5", + align="center", + py="6", + ), + size="4", + ), + id="pricing", + ) + + +def testimonial_card(quote: str, name: str, role: str, initials: str) -> rx.Component: + return rx.card( + rx.vstack( + rx.hstack( + *[rx.icon("star", size=14, color="var(--amber-9)") for _ in range(5)], + spacing="1", + ), + rx.text( + f"\\"{quote}\\"", + size="3", + style={"font_style": "italic"}, + color="var(--gray-12)", + line_height="1.6", + ), + rx.hstack( + rx.avatar(fallback=initials, size="3", radius="full"), + rx.vstack( + rx.text(name, size="2", weight="bold"), + rx.text(role, size="1", color="var(--gray-11)"), + spacing="0", + ), + align="center", + spacing="3", + ), + spacing="4", + ), + size="3", + ) + + +def testimonials() -> rx.Component: + return rx.section( + rx.container( + rx.vstack( + rx.badge("Testimonials", variant="surface", size="2", radius="full"), + rx.heading("Loved by developers", size="8", weight="bold", align="center"), + rx.text( + "See what developers around the world are saying.", + size="4", + color="var(--gray-11)", + align="center", + ), + rx.grid( + testimonial_card( + "This cut our development time in half. 
We shipped our MVP in two weeks instead of two months.", + "Sarah Chen", + "CTO at LaunchPad", + "SC", + ), + testimonial_card( + "Finally, a framework that lets me build full-stack apps without leaving Python. Game changer.", + "Marcus Johnson", + "Senior Engineer at DataFlow", + "MJ", + ), + testimonial_card( + "The component library is incredible. I spent zero time building UI primitives and all my time on business logic.", + "Priya Patel", + "Founder of MetricsDash", + "PP", + ), + columns={"base": "1", "md": "3"}, + spacing="5", + width="100%", + ), + spacing="5", + align="center", + py="6", + ), + size="4", + ), + id="testimonials", + style={"background": "var(--accent-2)"}, + ) + + +def cta() -> rx.Component: + return rx.section( + rx.container( + rx.card( + rx.vstack( + rx.heading("Ready to build something amazing?", size="7", weight="bold", align="center"), + rx.text( + "Join thousands of developers shipping faster with pure Python. " + "Get started in under 60 seconds.", + size="4", + color="var(--gray-11)", + align="center", + max_width="480px", + ), + rx.hstack( + rx.button( + rx.icon("arrow-right", size=16), + "Start Building", + size="4", + high_contrast=True, + radius="full", + ), + rx.button( + "Talk to Sales", + size="4", + variant="outline", + radius="full", + ), + spacing="3", + ), + spacing="5", + align="center", + py="6", + ), + size="5", + ), + size="4", + ), + ) + + +def footer() -> rx.Component: + return rx.el.footer( + rx.container( + rx.vstack( + rx.separator(size="4"), + rx.hstack( + rx.hstack( + rx.icon("hexagon", size=20, color="var(--accent-9)"), + rx.text("Acme", size="3", weight="bold"), + align="center", + spacing="2", + ), + rx.hstack( + rx.link("Privacy", href="#", underline="none", size="2", color="var(--gray-11)"), + rx.link("Terms", href="#", underline="none", size="2", color="var(--gray-11)"), + rx.link("Contact", href="#", underline="none", size="2", color="var(--gray-11)"), + spacing="4", + ), + justify="between", + 
align="center", + width="100%", + ), + rx.text( + "\\u00a9 2026 Acme Inc. All rights reserved.", + size="1", + color="var(--gray-11)", + ), + spacing="4", + py="6", + ), + size="4", + ), + ) + + +def index() -> rx.Component: + return rx.el.main( + navbar(), + hero(), + stats_bar(), + features(), + how_it_works(), + pricing(), + testimonials(), + cta(), + footer(), + ) + + +app = rx.App() +app.add_page( + index, + title="Acme - Ship Products 10x Faster", + description="Build beautiful full-stack web apps with pure Python. No JavaScript required.", +) +''' + + +@dataclass(frozen=True) +class LighthouseBenchmarkResult: + """A structured Lighthouse benchmark result.""" + + report: dict[str, Any] + report_path: Path + summary: str + failures: list[str] + + +def should_run_lighthouse() -> bool: + """Check whether Lighthouse benchmarks are enabled. + + Returns: + Whether Lighthouse benchmarks are enabled. + """ + return os.environ.get(LIGHTHOUSE_RUN_ENV_VAR, "").lower() in TRUTHY_ENV_VALUES + + +def format_score(score: float | None) -> str: + """Format a Lighthouse score for display. + + Args: + score: The Lighthouse score in the 0-1 range. + + Returns: + The score formatted as a 0-100 string. + """ + if score is None: + return "n/a" + return str(round(score * 100)) + + +def format_lighthouse_summary( + report: dict[str, Any], report_path: Path, label: str = "blank prod app" +) -> str: + """Format a compact Lighthouse score summary. + + Args: + report: The parsed Lighthouse JSON report. + report_path: The saved report path. + label: A short label describing the app under test. + + Returns: + A human-readable multi-line summary of Lighthouse scores. 
+ """ + lines = [ + f"Lighthouse summary for {label}", + "", + f"{'Category':<16} {'Score':>5} {'Target':>6} {'Status':>6}", + f"{'-' * 16} {'-' * 5} {'-' * 6} {'-' * 6}", + ] + failure_details = [] + + for category_name, threshold in LIGHTHOUSE_CATEGORY_THRESHOLDS.items(): + score = report["categories"][category_name]["score"] + passed = score is not None and score >= threshold + lines.append( + f"{category_name:<16} {format_score(score):>5} {round(threshold * 100):>6} {'PASS' if passed else 'FAIL':>6}" + ) + if not passed: + failure_details.append( + f"- {category_name}: {get_category_failure_details(report, category_name)}" + ) + + lines.extend([ + "", + f"Report: {report_path}", + ]) + if failure_details: + lines.extend([ + "", + "Lowest-scoring audits:", + *failure_details, + ]) + + return "\n".join(lines) + + +def get_lighthouse_command() -> list[str]: + """Resolve the Lighthouse CLI command. + + Returns: + The command prefix used to invoke Lighthouse. + """ + if command := os.environ.get(LIGHTHOUSE_COMMAND_ENV_VAR): + return shlex.split(command) + if shutil.which("lighthouse") is not None: + return ["lighthouse"] + if shutil.which("npx") is not None: + return ["npx", "--yes", LIGHTHOUSE_CLI_PACKAGE] + if shutil.which("pnpx") is not None: + return ["pnpx", LIGHTHOUSE_CLI_PACKAGE] + pytest.skip( + "Lighthouse CLI is unavailable. " + "Install `lighthouse`, make `npx` or `pnpx` available, " + f"or set {LIGHTHOUSE_COMMAND_ENV_VAR}." + ) + + +def _format_subprocess_output(output: str | bytes | None) -> str: + """Normalize subprocess output for failure messages. + + Args: + output: The captured subprocess output. + + Returns: + The output as a decoded string. + """ + if output is None: + return "" + if isinstance(output, bytes): + return output.decode(errors="replace") + return output + + +@cache +def _prepare_lighthouse_command(command: tuple[str, ...]) -> tuple[str, ...]: + """Warm package-runner-based Lighthouse commands before the benchmark. 
+ + Args: + command: The Lighthouse command prefix. + + Returns: + The original command prefix. + """ + if not command or command[0] not in {"npx", "pnpx"}: + return command + + prepare_command = [*command, "--version"] + try: + subprocess.run( + prepare_command, + check=True, + capture_output=True, + text=True, + timeout=LIGHTHOUSE_COMMAND_PREP_TIMEOUT_SECONDS, + ) + except subprocess.CalledProcessError as err: + pytest.fail( + "Lighthouse CLI preparation failed. " + "If Lighthouse is not already installed, make sure the npm registry " + f"is reachable or set {LIGHTHOUSE_COMMAND_ENV_VAR} to an installed CLI.\n" + f"Command: {' '.join(prepare_command)}\n" + f"stdout:\n{_format_subprocess_output(err.stdout)}\n" + f"stderr:\n{_format_subprocess_output(err.stderr)}" + ) + except subprocess.TimeoutExpired as err: + pytest.fail( + f"Lighthouse CLI preparation timed out after {err.timeout}s. " + "If Lighthouse is not already installed, make sure the npm registry " + f"is reachable or set {LIGHTHOUSE_COMMAND_ENV_VAR} to an installed CLI.\n" + f"Command: {' '.join(prepare_command)}\n" + f"stdout:\n{_format_subprocess_output(err.stdout)}\n" + f"stderr:\n{_format_subprocess_output(err.stderr)}" + ) + + return command + + +def _get_lighthouse_target_url(url: str) -> str: + """Convert bind-all URLs into loopback URLs that browser clients can reach. + + Args: + url: The reported frontend URL. + + Returns: + A client-reachable URL for Lighthouse. 
+ """ + parsed = urlsplit(url) + replacement_host = { + "0.0.0.0": "127.0.0.1", + "::": "::1", + }.get(parsed.hostname or "") + if replacement_host is None: + return url + + auth = "" + if parsed.username is not None: + auth = parsed.username + if parsed.password is not None: + auth += f":{parsed.password}" + auth += "@" + + host = replacement_host + if ":" in host: + host = f"[{host}]" + + netloc = f"{auth}{host}" + if parsed.port is not None: + netloc = f"{netloc}:{parsed.port}" + + return urlunsplit(parsed._replace(netloc=netloc)) + + +def get_chrome_path() -> str: + """Resolve the Chromium executable used by Lighthouse. + + Returns: + The path to the Chromium executable Lighthouse should launch. + """ + if chrome_path := os.environ.get(LIGHTHOUSE_CHROME_PATH_ENV_VAR): + resolved_path = Path(chrome_path).expanduser() + if not resolved_path.exists(): + pytest.skip( + f"{LIGHTHOUSE_CHROME_PATH_ENV_VAR} points to a missing binary: {resolved_path}" + ) + return str(resolved_path) + + sync_api = pytest.importorskip( + "playwright.sync_api", + reason="Playwright is required to locate a Chromium binary for Lighthouse.", + ) + candidates: list[Path] = [] + with sync_api.sync_playwright() as playwright: + candidates.append(Path(playwright.chromium.executable_path)) + + browser_cache_dirs = [ + Path.home() / ".cache" / "ms-playwright", + Path.home() / "Library" / "Caches" / "ms-playwright", + ] + if local_app_data := os.environ.get("LOCALAPPDATA"): + browser_cache_dirs.append(Path(local_app_data) / "ms-playwright") + + browser_glob_patterns = [ + "chromium_headless_shell-*/*/chrome-headless-shell", + "chromium-*/*/chrome", + "chromium-*/*/chrome.exe", + "chromium-*/*/Chromium.app/Contents/MacOS/Chromium", + ] + for cache_dir in browser_cache_dirs: + if not cache_dir.exists(): + continue + for pattern in browser_glob_patterns: + candidates.extend(sorted(cache_dir.glob(pattern), reverse=True)) + + for resolved_path in candidates: + if resolved_path.exists(): + return 
str(resolved_path) + + pytest.skip( + "Playwright Chromium is not installed. " + "Run `uv run playwright install chromium --only-shell` first." + ) + + +def get_category_failure_details(report: dict[str, Any], category_name: str) -> str: + """Summarize the lowest-scoring weighted audits in a Lighthouse category. + + Args: + report: The parsed Lighthouse JSON report. + category_name: The category to summarize. + + Returns: + A short summary of the lowest-scoring weighted audits. + """ + category = report["categories"][category_name] + audits = report["audits"] + failing_audits: list[tuple[float, str]] = [] + + for audit_ref in category["auditRefs"]: + if audit_ref["weight"] <= 0: + continue + audit = audits[audit_ref["id"]] + score = audit.get("score") + if score is None or score >= 1: + continue + failing_audits.append((score, audit["title"])) + + if not failing_audits: + return "no weighted audit details" + + failing_audits.sort(key=operator.itemgetter(0)) + return ", ".join( + f"{title} ({format_score(score)})" for score, title in failing_audits[:3] + ) + + +def run_lighthouse(url: str, report_path: Path) -> dict[str, Any]: + """Run Lighthouse against a URL and return the parsed JSON report. + + Args: + url: The URL to audit. + report_path: Where to save the JSON report. + + Returns: + The parsed Lighthouse JSON report. 
+ """ + command = [ + *_prepare_lighthouse_command(tuple(get_lighthouse_command())), + url, + "--output=json", + f"--output-path={report_path}", + f"--chrome-path={get_chrome_path()}", + f"--only-categories={','.join(LIGHTHOUSE_CATEGORIES)}", + "--quiet", + "--chrome-flags=--headless=new --no-sandbox --disable-dev-shm-usage", + ] + + try: + subprocess.run( + command, + check=True, + capture_output=True, + text=True, + timeout=LIGHTHOUSE_RUN_TIMEOUT_SECONDS, + ) + except subprocess.CalledProcessError as err: + pytest.fail( + "Lighthouse execution failed.\n" + f"Command: {' '.join(command)}\n" + f"stdout:\n{_format_subprocess_output(err.stdout)}\n" + f"stderr:\n{_format_subprocess_output(err.stderr)}" + ) + except subprocess.TimeoutExpired as err: + pytest.fail( + f"Lighthouse execution timed out after {err.timeout}s.\n" + f"Command: {' '.join(command)}\n" + f"stdout:\n{_format_subprocess_output(err.stdout)}\n" + f"stderr:\n{_format_subprocess_output(err.stderr)}" + ) + return json.loads(report_path.read_text()) + + +def _ensure_lighthouse_app( + root: Path, app_name: str, page_source: str | None = None +) -> None: + """Initialize a Lighthouse benchmark app. + + Args: + root: The app root directory. + app_name: The app name for initialization. + page_source: Optional custom page source to overwrite the generated page. + """ + root.mkdir(parents=True, exist_ok=True) + with chdir(root): + initialize_default_app(app_name) + if page_source is not None: + (Path(app_name) / f"{app_name}.py").write_text(page_source) + + +def _run_prod_lighthouse_benchmark( + app_root: Path, + app_name: str, + report_path: Path, + label: str, +) -> LighthouseBenchmarkResult: + """Run Lighthouse against a Reflex app via ``reflex run --env prod``. + + Uses the real production code path so the benchmark automatically + reflects any future changes to how Reflex serves apps in prod. + + Args: + app_root: The app root to initialize or reuse. + app_name: The app name matching the directory layout. 
+ report_path: Where to save the Lighthouse JSON report. + label: A short label for the summary output. + + Returns: + A structured benchmark result. + """ + report_path.parent.mkdir(parents=True, exist_ok=True) + + proc = subprocess.Popen( + [ + "uv", + "run", + "reflex", + "run", + "--env", + "prod", + "--frontend-only", + "--loglevel", + "info", + ], + cwd=str(app_root), + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + ) + + # Wait for the frontend URL to appear in stdout. + frontend_url = None + captured_output: list[str] = [] + deadline = time.monotonic() + 120 + assert proc.stdout is not None + while time.monotonic() < deadline: + line = proc.stdout.readline() + if not line: + break + captured_output.append(line) + m = re.search(r"App running at:\s*(http\S+)", line) + if m: + frontend_url = m.group(1).rstrip("/") + break + + if frontend_url is None: + proc.terminate() + try: + proc.wait(timeout=10) + except subprocess.TimeoutExpired: + proc.kill() + proc.wait() + output = "".join(captured_output) + pytest.fail( + f"reflex run --env prod did not start within timeout for {label}\n" + f"Captured output:\n{output}" + ) + + benchmark_url = _get_lighthouse_target_url(frontend_url) + + # Warmup request: ensure the server is fully ready before benchmarking. 
+ warmup_deadline = time.monotonic() + 30 + while time.monotonic() < warmup_deadline: + try: + urllib.request.urlopen(benchmark_url, timeout=5) + break + except Exception: + time.sleep(0.5) + else: + proc.terminate() + proc.wait(timeout=10) + pytest.fail( + f"Warmup request to {benchmark_url} " + f"(reported as {frontend_url}) never succeeded for {label}" + ) + + try: + report = run_lighthouse(benchmark_url, report_path) + finally: + proc.terminate() + try: + proc.wait(timeout=10) + except subprocess.TimeoutExpired: + proc.kill() + proc.wait() + + failures = [] + for category_name, threshold in LIGHTHOUSE_CATEGORY_THRESHOLDS.items(): + score = report["categories"][category_name]["score"] + if score is None or score < threshold: + failures.append(category_name) + + return LighthouseBenchmarkResult( + report=report, + report_path=report_path, + summary=format_lighthouse_summary(report, report_path, label=label), + failures=failures, + ) + + +def run_landing_prod_lighthouse_benchmark( + app_root: Path, + report_path: Path, +) -> LighthouseBenchmarkResult: + """Run Lighthouse against a single-page landing app in prod mode. + + Args: + app_root: The app root to initialize or reuse. + report_path: Where to save the Lighthouse JSON report. + + Returns: + A structured benchmark result. 
+ """ + _ensure_lighthouse_app(app_root, LIGHTHOUSE_LANDING_APP_NAME, LANDING_PAGE_SOURCE) + return _run_prod_lighthouse_benchmark( + app_root=app_root, + app_name=LIGHTHOUSE_LANDING_APP_NAME, + report_path=report_path, + label="landing page prod app", + ) diff --git a/tests/integration/test_lighthouse.py b/tests/integration/test_lighthouse.py new file mode 100644 index 00000000000..fc89c6c4587 --- /dev/null +++ b/tests/integration/test_lighthouse.py @@ -0,0 +1,50 @@ +"""Lighthouse benchmark tests for production Reflex apps.""" + +from __future__ import annotations + +from pathlib import Path + +import pytest + +from .lighthouse_utils import ( + run_landing_prod_lighthouse_benchmark, + should_run_lighthouse, +) + +pytestmark = pytest.mark.skipif( + not should_run_lighthouse(), + reason="Set REFLEX_RUN_LIGHTHOUSE=1 to run Lighthouse benchmark tests.", +) + + +@pytest.fixture(scope="module") +def lighthouse_landing_app_root( + tmp_path_factory: pytest.TempPathFactory, +) -> Path: + """Get the app root for the landing-page Lighthouse benchmark. + + Args: + tmp_path_factory: Pytest helper for allocating temporary directories. + + Returns: + The app root path for the landing-page benchmark app. + """ + return tmp_path_factory.mktemp("lighthouse_landing_app") + + +def test_landing_page_lighthouse_scores( + lighthouse_landing_app_root: Path, + tmp_path: Path, +): + """Assert that a single-page landing app stays in the 90s across Lighthouse categories.""" + result = run_landing_prod_lighthouse_benchmark( + app_root=lighthouse_landing_app_root, + report_path=tmp_path / "landing-prod-lighthouse.json", + ) + print(result.summary) + + if result.failures: + pytest.fail( + "Lighthouse thresholds not met. 
See score summary above.", + pytrace=False, + ) diff --git a/tests/integration/test_precompressed_frontend.py b/tests/integration/test_precompressed_frontend.py new file mode 100644 index 00000000000..8382c3ac55f --- /dev/null +++ b/tests/integration/test_precompressed_frontend.py @@ -0,0 +1,105 @@ +"""Integration tests for precompressed production frontend responses.""" + +from __future__ import annotations + +from collections.abc import Generator +from http.client import HTTPConnection +from urllib.parse import urlsplit + +import pytest + +from reflex.testing import AppHarness, AppHarnessProd + + +def PrecompressedFrontendApp(): + """A minimal app for production static frontend checks.""" + import reflex as rx + + app = rx.App() + + @app.add_page + def index(): + return rx.el.main( + rx.heading("Precompressed Frontend"), + rx.text("Hello from Reflex"), + ) + + +def _request_raw( + frontend_url: str, + path: str, + headers: dict[str, str] | None = None, +) -> tuple[int, dict[str, str], bytes]: + """Send a raw HTTP request without client-side decompression. + + Args: + frontend_url: The frontend base URL. + path: The request path. + headers: Optional request headers. + + Returns: + The status code, response headers, and raw response body. + """ + parsed = urlsplit(frontend_url) + assert parsed.hostname is not None + assert parsed.port is not None + connection = HTTPConnection(parsed.hostname, parsed.port, timeout=10) + connection.request("GET", path, headers=headers or {}) + response = connection.getresponse() + body = response.read() + response_headers = {key.lower(): value for key, value in response.getheaders()} + status = response.status + connection.close() + return status, response_headers, body + + +@pytest.fixture(scope="module") +def prod_test_app( + app_harness_env: type[AppHarness], + tmp_path_factory, +) -> Generator[AppHarness, None, None]: + """Start the precompressed test app in production mode only. + + Yields: + A running production app harness. 
+ """ + if app_harness_env is not AppHarnessProd: + pytest.skip("precompressed frontend checks are prod-only") + + with app_harness_env.create( + root=tmp_path_factory.mktemp("precompressed_frontend"), + app_name="precompressed_frontend", + app_source=PrecompressedFrontendApp, + ) as harness: + yield harness + + +def test_prod_frontend_serves_precompressed_index_html(prod_test_app: AppHarness): + """Root HTML should be served from its precompressed sidecar.""" + assert prod_test_app.frontend_url is not None + + status, headers, body = _request_raw( + prod_test_app.frontend_url, + "/", + headers={"Accept-Encoding": "gzip"}, + ) + + assert status == 200 + assert headers["content-encoding"] == "gzip" + assert headers["vary"] == "Accept-Encoding" + assert body.startswith(b"\x1f\x8b") + + +def test_prod_frontend_serves_precompressed_404_fallback(prod_test_app: AppHarness): + """Unknown routes should serve the compressed 404.html fallback.""" + assert prod_test_app.frontend_url is not None + + status, headers, body = _request_raw( + prod_test_app.frontend_url, + "/missing-route", + headers={"Accept-Encoding": "gzip"}, + ) + + assert status == 404 + assert headers["content-encoding"] == "gzip" + assert body.startswith(b"\x1f\x8b") diff --git a/tests/integration/test_prod_build_pipeline.py b/tests/integration/test_prod_build_pipeline.py new file mode 100644 index 00000000000..726ac535fef --- /dev/null +++ b/tests/integration/test_prod_build_pipeline.py @@ -0,0 +1,202 @@ +"""Integration tests for the production build pipeline. + +Tests precompressed assets, image optimization, and CSS purging +against a real prod build served via AppHarnessProd. 
+""" + +from __future__ import annotations + +import struct +import zlib +from collections.abc import Generator +from http.client import HTTPConnection +from pathlib import Path +from urllib.parse import urlsplit + +import pytest + +from reflex.testing import AppHarness, AppHarnessProd + + +def _make_test_png(min_size: int = 2048) -> bytes: + """Build a valid PNG that is at least *min_size* bytes. + + Returns: + The PNG file bytes. + """ + ihdr_data = b"IHDR" + struct.pack(">II", 2, 2) + b"\x08\x02\x00\x00\x00" + ihdr = ( + struct.pack(">I", 13) + + ihdr_data + + struct.pack(">I", zlib.crc32(ihdr_data) & 0xFFFFFFFF) + ) + + raw_rows = b"\x00\xff\x00\x00\x00\xff\x00" * 2 + compressed = zlib.compress(raw_rows) + idat_data = b"IDAT" + compressed + idat = ( + struct.pack(">I", len(compressed)) + + idat_data + + struct.pack(">I", zlib.crc32(idat_data) & 0xFFFFFFFF) + ) + + iend = b"\x00\x00\x00\x00IEND\xaeB`\x82" + png = b"\x89PNG\r\n\x1a\n" + ihdr + idat + iend + + if len(png) < min_size: + text_payload = b"tEXtComment\x00" + b"A" * (min_size - len(png) - 12) + text_chunk = ( + struct.pack(">I", len(text_payload)) + + text_payload + + struct.pack(">I", zlib.crc32(text_payload) & 0xFFFFFFFF) + ) + png = png[:-12] + text_chunk + iend + return png + + +def ProdBuildPipelineApp(): + """A minimal app with an image asset for build pipeline testing.""" + import reflex as rx + + app = rx.App() + + @app.add_page + def index(): + return rx.el.main( + rx.heading("Build Pipeline Test"), + rx.text("Hello"), + rx.image(src="/test_image.png", alt="test"), + ) + + +def _request_raw( + url: str, + path: str, + headers: dict[str, str] | None = None, +) -> tuple[int, dict[str, str], bytes]: + """Send a raw HTTP request without client-side decompression. + + Returns: + The status code, response headers, and raw body. 
+ """ + parsed = urlsplit(url) + assert parsed.hostname is not None + conn = HTTPConnection(parsed.hostname, parsed.port, timeout=10) + conn.request("GET", path, headers=headers or {}) + resp = conn.getresponse() + body = resp.read() + hdrs = {k.lower(): v for k, v in resp.getheaders()} + status = resp.status + conn.close() + return status, hdrs, body + + +@pytest.fixture(scope="module") +def prod_app( + app_harness_env: type[AppHarness], + tmp_path_factory: pytest.TempPathFactory, +) -> Generator[AppHarness, None, None]: + """Build and serve the test app in production mode. + + Yields: + A running production app harness. + """ + if app_harness_env is not AppHarnessProd: + pytest.skip("build pipeline checks are prod-only") + + root = tmp_path_factory.mktemp("prod_build_pipeline") + harness = app_harness_env.create( + root=root, + app_name="prod_build_pipeline", + app_source=ProdBuildPipelineApp, + ) + # Initialize the app (creates .web/public/ etc.) but don't start yet. + harness._initialize_app() + + # Place a test PNG in .web/public/ so the Vite build picks it up. + # (Reflex serves `assets/` via the backend at runtime, but the Vite + # image-optimize plugin only processes files inside the build tree.) + import reflex.utils.prerequisites as prerequisites + + public_dir = root / prerequisites.get_web_dir() / "public" + public_dir.mkdir(exist_ok=True) + (public_dir / "test_image.png").write_bytes(_make_test_png()) + + # Now run the rest of startup (backend, build, frontend server). + harness._start_backend() + harness._start_frontend() + harness._wait_frontend() + try: + yield harness + finally: + harness.stop() + + +def _find_build_files(harness: AppHarness, pattern: str) -> list[Path]: + """Find files matching a glob pattern in the prod build output. + + Returns: + Sorted list of matching paths. 
+ """ + import reflex.constants as constants + import reflex.utils.prerequisites as prerequisites + + static_dir = harness.app_path / prerequisites.get_web_dir() / constants.Dirs.STATIC + return sorted(static_dir.rglob(pattern)) + + +# -- Precompressed assets -- + + +def test_js_bundles_have_gz_sidecars(prod_app: AppHarness): + """Production JS bundles should have .gz sidecar files.""" + assert _find_build_files(prod_app, "**/*.js"), "No JS files in build" + assert _find_build_files(prod_app, "**/*.js.gz"), "No .js.gz sidecars found" + + +def test_css_bundles_have_gz_sidecars(prod_app: AppHarness): + """Production CSS bundles should have .gz sidecar files.""" + assert _find_build_files(prod_app, "**/*.css"), "No CSS files in build" + assert _find_build_files(prod_app, "**/*.css.gz"), "No .css.gz sidecars found" + + +def test_gzip_content_negotiation(prod_app: AppHarness): + """Server should return gzip-encoded response when client accepts it.""" + assert prod_app.frontend_url is not None + # Request a JS bundle rather than / since HTML may be small + js_files = _find_build_files(prod_app, "assets/**/*.js") + assert js_files, "No JS bundles to test" + + # Get the relative path from the static dir to use as URL path + import reflex.constants as constants + import reflex.utils.prerequisites as prerequisites + + static_dir = prod_app.app_path / prerequisites.get_web_dir() / constants.Dirs.STATIC + js_path = "/" + str(js_files[0].relative_to(static_dir)) + + status, headers, body = _request_raw( + prod_app.frontend_url, + js_path, + headers={"Accept-Encoding": "gzip"}, + ) + assert status == 200 + assert headers.get("content-encoding") == "gzip" + assert body[:2] == b"\x1f\x8b" + + +# -- Image optimization -- + + +def test_png_has_webp_sidecar(prod_app: AppHarness): + """PNG assets should produce WebP sidecars after the build.""" + png_files = _find_build_files(prod_app, "**/test_image.png") + if not png_files: + pytest.skip( + "test_image.png not in build output — Vite 
may have " + "fingerprinted or inlined the asset" + ) + + webp_files = _find_build_files(prod_app, "**/test_image.webp") + assert webp_files, ( + "No test_image.webp sidecar — image optimization may not be running" + ) diff --git a/tests/units/compiler/test_compiler.py b/tests/units/compiler/test_compiler.py index 218ae1de6d2..8c69c51d8e4 100644 --- a/tests/units/compiler/test_compiler.py +++ b/tests/units/compiler/test_compiler.py @@ -1,10 +1,12 @@ import importlib.util import os +import re from pathlib import Path import pytest from pytest_mock import MockerFixture from reflex_base import constants +from reflex_base.compiler.templates import vite_config_template from reflex_base.constants.compiler import PageNames from reflex_base.utils.imports import ImportVar, ParsedImportDict from reflex_base.vars.base import Var @@ -359,6 +361,123 @@ def test_compile_nonexistent_stylesheet(tmp_path, mocker: MockerFixture): compiler.compile_root_stylesheet(stylesheets) +class TestGetRadixThemesStylesheets: + """Tests for the granular Radix Themes stylesheet selection.""" + + def test_no_roots_falls_back_to_monolith(self): + """When no roots are provided, use the monolithic stylesheet.""" + assert compiler.get_radix_themes_stylesheets(None) == [ + "@radix-ui/themes/styles.css" + ] + + def test_literal_accent_emits_granular_imports(self): + """A literal accent_color emits only the needed granular imports.""" + import reflex as rx + + sheets = compiler.get_radix_themes_stylesheets([rx.theme(accent_color="blue")]) + assert sheets == [ + "@radix-ui/themes/tokens/base.css", + # blue's natural gray pairing is slate + "@radix-ui/themes/tokens/colors/slate.css", + "@radix-ui/themes/tokens/colors/blue.css", + "@radix-ui/themes/components.css", + "@radix-ui/themes/utilities.css", + ] + + def test_explicit_gray_overrides_auto_pairing(self): + """An explicit gray_color replaces the accent's auto-paired gray.""" + import reflex as rx + + sheets = compiler.get_radix_themes_stylesheets([ + 
rx.theme(accent_color="red", gray_color="mauve") + ]) + assert "@radix-ui/themes/tokens/colors/mauve.css" in sheets + assert "@radix-ui/themes/tokens/colors/red.css" in sheets + # The default auto pairing for red is also mauve, so no extra colors. + color_sheets = [s for s in sheets if "/colors/" in s] + assert len(color_sheets) == 2 + + def test_nested_themes_union_colors(self): + """Nested Theme components contribute the union of their colors.""" + import reflex as rx + + root = rx.box( + rx.theme(accent_color="green"), + rx.theme(accent_color="pink"), + ) + sheets = compiler.get_radix_themes_stylesheets([root]) + color_sheets = {s for s in sheets if "/colors/" in s} + assert "@radix-ui/themes/tokens/colors/green.css" in color_sheets + assert "@radix-ui/themes/tokens/colors/pink.css" in color_sheets + + def test_dynamic_color_falls_back_to_monolith(self): + """A state-driven Theme color forces the monolithic stylesheet.""" + from typing import Literal + + import reflex as rx + + class _S(rx.State): + color: Literal["red", "blue"] = "red" + + sheets = compiler.get_radix_themes_stylesheets([ + rx.theme(accent_color=_S.color) + ]) + assert sheets == ["@radix-ui/themes/styles.css"] + + +class TestCollectWindowLibraryImports: + """Tests for the import collection that drives window.__reflex.""" + + @pytest.fixture(autouse=True) + def _isolate_dynamic_imports(self): + from reflex_base.components.dynamic import reset_dynamic_component_imports + + reset_dynamic_component_imports() + yield + reset_dynamic_component_imports() + + def test_internal_modules_always_star_imported(self): + """Internal Reflex modules map to None (star import) so dynamic + components / plugins reading window.__reflex find what they need + even when the app has no static external references. 
+ """ + result = compiler.collect_window_library_imports([{}]) + assert result["$/utils/state"] is None + assert "@radix-ui/themes" not in result + + def test_external_lib_uses_named_imports_from_static_usage(self): + """External library exposure on window.__reflex uses named imports + so Rolldown can tree-shake unused exports. + """ + from reflex_base.utils.imports import ImportVar + + sources = [ + {"$/utils/state": [ImportVar(tag="evalReactComponent")]}, + { + "@radix-ui/themes@3.3.0": [ + ImportVar(tag="Theme"), + ImportVar(tag="Button"), + ] + }, + ] + result = compiler.collect_window_library_imports(sources) + assert result["@radix-ui/themes"] == {"Theme", "Button"} + + def test_unions_dynamic_component_tags(self): + """Tags captured during dynamic-Component serialization are unioned + into the named-import surface so runtime-eval'd code finds them on + window.__reflex. + """ + from reflex_base.components.dynamic import dynamic_component_imports + from reflex_base.utils.imports import ImportVar + + sources = [{"@radix-ui/themes@3.3.0": [ImportVar(tag="Theme")]}] + dynamic_component_imports["@radix-ui/themes@3.3.0"] = {ImportVar(tag="Flex")} + + result = compiler.collect_window_library_imports(sources) + assert result["@radix-ui/themes"] == {"Theme", "Flex"} + + def test_create_document_root(): """Test that the document root is created correctly.""" # Test with no components. 
# Tail hunk of test_create_document_root_with_meta_viewport; `root` is built
# in the (out-of-view) lines above this hunk.
    assert str(root.children[0].children[2].name) == '"viewport"'  # pyright: ignore [reportAttributeAccessIssue]
    assert str(root.children[0].children[2].content) == '"foo"'  # pyright: ignore [reportAttributeAccessIssue]
    assert str(root.children[0].children[3].char_set) == '"utf-8"'  # pyright: ignore [reportAttributeAccessIssue]


class TestViteConfigChunking:
    """Tests for Vite config chunk splitting strategy."""

    def _generate_vite_config(self) -> str:
        # Render the template with a representative configuration so the
        # chunk-rule regexes below have real output to match against.
        return vite_config_template(
            base="/",
            hmr=True,
            force_full_reload=False,
            experimental_hmr=False,
            sourcemap=False,
        )

    def test_no_monolithic_radix_ui_chunk(self):
        """Radix-ui packages must not be grouped into a single monolithic chunk.

        A single 'radix-ui' chunk forces every page to download ALL radix code
        even when it only uses a fraction, wasting 55+ KB on typical pages.
        """
        config = self._generate_vite_config()

        # There should be no chunk rule that matches ALL @radix-ui/* packages
        # under a single name like "radix-ui".
        monolithic_radix = re.search(r"""name:\s*["']radix-ui["']""", config)
        assert monolithic_radix is None, (
            "Vite config must not group all @radix-ui/* packages into a single "
            "'radix-ui' chunk. This forces pages to download unused radix code. "
            "Remove the monolithic radix-ui chunk rule and let Vite split per-route."
        )

    def test_vendor_chunks_exist_for_large_libraries(self):
        """Key vendor libraries should still have dedicated chunks for caching."""
        config = self._generate_vite_config()

        # These libraries are large and benefit from dedicated chunks for
        # cross-page cache reuse.
        for lib_name in ["socket-io", "mantine", "recharts"]:
            assert re.search(rf"""name:\s*["']{lib_name}["']""", config), (
                f"Expected dedicated chunk for '{lib_name}'"
            )

    def test_reflex_env_chunk_exists(self):
        """The env.json chunk should always exist for config isolation."""
        config = self._generate_vite_config()
        assert re.search(r"""name:\s*["']reflex-env["']""", config)


# --- tests/units/components/core/test_sticky.py (new file) --------------------
from reflex_components_core.core.sticky import StickyBadge


def test_sticky_badge_accessible_name():
    # Rendered props serialize to a JS-object string; check the accessible
    # name is present both as aria-label and as a hover title.
    props = StickyBadge.create().render()["props"]

    assert '"aria-label":"Built with Reflex"' in props
    assert 'title:"Built with Reflex"' in props


# --- tests/units/components/radix/test_color_mode.py (new file) ---------------
from reflex_components_radix.themes.color_mode import ColorModeIconButton


def test_color_mode_icon_button_accessible_defaults():
    props = ColorModeIconButton.create().render()["props"]

    assert '"aria-label":"Toggle color mode"' in props
    assert 'title:"Toggle color mode"' in props


def test_color_mode_icon_button_accessible_overrides():
    # Caller-supplied accessibility props must win over the defaults.
    props = ColorModeIconButton.create(
        aria_label="Switch theme",
        title="Switch theme",
    ).render()["props"]

    assert '"aria-label":"Switch theme"' in props
    assert 'title:"Switch theme"' in props


# --- tests/units/test_build.py (new file) -------------------------------------
"""Unit tests for frontend build helpers."""

from pathlib import Path

from reflex.utils.build import _duplicate_index_html_to_parent_directory


def test_duplicate_index_html_to_parent_directory_copies_sidecars(tmp_path: Path):
    """Duplicate index.html sidecars alongside copied route HTML files."""
    route_dir = tmp_path / "docs"
    route_dir.mkdir()
    (route_dir / "index.html").write_text("docs")
    (route_dir / "index.html.gz").write_bytes(b"gzip")
    (route_dir / "index.html.br").write_bytes(b"brotli")

    _duplicate_index_html_to_parent_directory(tmp_path, (".gz", ".br"))

    # docs/index.html becomes docs.html, and each compressed sidecar follows.
    assert (tmp_path / "docs.html").read_text() == "docs"
    assert (tmp_path / "docs.html.gz").read_bytes() == b"gzip"
    assert (tmp_path / "docs.html.br").read_bytes() == b"brotli"


# --- tests/units/test_config.py (edited: new import + new tests) --------------
from reflex_base.utils.exceptions import ConfigError


def test_update_from_env_frontend_compression_formats(
    base_config_values: dict[str, Any],
    monkeypatch: pytest.MonkeyPatch,
):
    """Test comma-delimited frontend compression formats from the environment."""
    # Whitespace and duplicate entries in the env value are normalized away.
    monkeypatch.setenv(
        "REFLEX_FRONTEND_COMPRESSION_FORMATS", "gzip, brotli , zstd, gzip"
    )
    config = rx.Config(**base_config_values)
    assert config.frontend_compression_formats == ["gzip", "brotli", "zstd"]


def test_invalid_frontend_compression_formats(base_config_values: dict[str, Any]):
    """Test that unsupported frontend compression formats raise config errors."""
    with pytest.raises(
        ConfigError,
        match="frontend_compression_formats contains unsupported format",
    ):
        rx.Config(
            **base_config_values,
            frontend_compression_formats=["gzip", "snappy"],
        )
def test_default_frontend_image_formats(base_config_values: dict[str, Any]):
    """Test default image optimization config values."""
    config = rx.Config(**base_config_values)
    assert config.frontend_image_formats == ["webp", "avif"]


def test_update_from_env_frontend_image_formats(
    base_config_values: dict[str, Any],
    monkeypatch: pytest.MonkeyPatch,
):
    """Test comma-delimited frontend image formats from the environment."""
    # Whitespace and duplicate entries in the env value are normalized away.
    monkeypatch.setenv("REFLEX_FRONTEND_IMAGE_FORMATS", "webp, avif , webp")
    config = rx.Config(**base_config_values)
    assert config.frontend_image_formats == ["webp", "avif"]


def test_invalid_frontend_image_formats(base_config_values: dict[str, Any]):
    """Test that unsupported frontend image formats raise config errors."""
    with pytest.raises(
        ConfigError,
        match="frontend_image_formats contains unsupported format",
    ):
        rx.Config(
            **base_config_values,
            frontend_image_formats=["webp", "png"],
        )


# (an existing parametrized test in test_config.py continues here, out of view)


# --- tests/units/test_lighthouse_utils.py (new file) --------------------------
"""Unit tests for Lighthouse benchmark utilities."""

import subprocess
from types import SimpleNamespace

import pytest

from tests.integration import lighthouse_utils


@pytest.fixture(autouse=True)
def clear_lighthouse_command_cache():
    """Reset cached Lighthouse command preparation between tests."""
    # _prepare_lighthouse_command is lru-cached; clear both before and after
    # so no test observes another test's warmup.
    lighthouse_utils._prepare_lighthouse_command.cache_clear()
    yield
    lighthouse_utils._prepare_lighthouse_command.cache_clear()


def test_get_lighthouse_command_prefers_npx_before_pnpx(
    monkeypatch: pytest.MonkeyPatch,
):
    """Use npx first when both package runners are available."""
    monkeypatch.delenv(lighthouse_utils.LIGHTHOUSE_COMMAND_ENV_VAR, raising=False)
    monkeypatch.setattr(
        lighthouse_utils.shutil,
        "which",
        lambda command: {
            "npx": "/usr/bin/npx",
            "pnpx": "/usr/bin/pnpx",
        }.get(command),
    )

    assert lighthouse_utils.get_lighthouse_command() == [
        "npx",
        "--yes",
        lighthouse_utils.LIGHTHOUSE_CLI_PACKAGE,
    ]


def test_get_lighthouse_command_falls_back_to_pnpx(
    monkeypatch: pytest.MonkeyPatch,
):
    """Use pnpx when npx is unavailable."""
    monkeypatch.delenv(lighthouse_utils.LIGHTHOUSE_COMMAND_ENV_VAR, raising=False)
    monkeypatch.setattr(
        lighthouse_utils.shutil,
        "which",
        lambda command: {
            "pnpx": "/usr/bin/pnpx",
        }.get(command),
    )

    assert lighthouse_utils.get_lighthouse_command() == [
        "pnpx",
        lighthouse_utils.LIGHTHOUSE_CLI_PACKAGE,
    ]


def test_prepare_lighthouse_command_warms_package_runner_once(
    monkeypatch: pytest.MonkeyPatch,
):
    """Warm package-runner commands once before Lighthouse executes."""
    calls: list[tuple[list[str], dict[str, object]]] = []

    def fake_run(command: list[str], **kwargs):
        calls.append((command, kwargs))
        return SimpleNamespace(returncode=0)

    monkeypatch.setattr(lighthouse_utils.subprocess, "run", fake_run)
    command = ("npx", "--yes", lighthouse_utils.LIGHTHOUSE_CLI_PACKAGE)

    # Second call must hit the cache — exactly one subprocess invocation below.
    assert lighthouse_utils._prepare_lighthouse_command(command) == command
    assert lighthouse_utils._prepare_lighthouse_command(command) == command

    assert calls == [
        (
            [*command, "--version"],
            {
                "check": True,
                "capture_output": True,
                "text": True,
                "timeout": lighthouse_utils.LIGHTHOUSE_COMMAND_PREP_TIMEOUT_SECONDS,
            },
        )
    ]


def test_prepare_lighthouse_command_timeout_has_friendly_message(
    monkeypatch: pytest.MonkeyPatch,
):
    """Timeouts during CLI warmup should fail with helpful pytest output."""

    def fake_run(*_args, **_kwargs):
        raise subprocess.TimeoutExpired(
            cmd=["npx", "--yes", lighthouse_utils.LIGHTHOUSE_CLI_PACKAGE, "--version"],
            timeout=lighthouse_utils.LIGHTHOUSE_COMMAND_PREP_TIMEOUT_SECONDS,
            output="prep stdout",
            stderr="prep stderr",
        )

    monkeypatch.setattr(lighthouse_utils.subprocess, "run", fake_run)
    command = ("npx", "--yes", lighthouse_utils.LIGHTHOUSE_CLI_PACKAGE)

    # assumes LIGHTHOUSE_COMMAND_PREP_TIMEOUT_SECONDS == 300 — TODO confirm
    with pytest.raises(pytest.fail.Exception, match="timed out after 300s"):
        lighthouse_utils._prepare_lighthouse_command(command)


def test_run_lighthouse_timeout_has_friendly_message(
    tmp_path,
    monkeypatch: pytest.MonkeyPatch,
):
    """Timeouts during a Lighthouse run should be reported via pytest.fail."""

    def fake_run(*_args, **_kwargs):
        raise subprocess.TimeoutExpired(
            cmd=["lighthouse", "http://localhost:3000"],
            timeout=lighthouse_utils.LIGHTHOUSE_RUN_TIMEOUT_SECONDS,
            output="run stdout",
            stderr="run stderr",
        )

    # Stub all external lookups so only the timeout path is exercised.
    monkeypatch.setattr(
        lighthouse_utils, "_prepare_lighthouse_command", lambda command: command
    )
    monkeypatch.setattr(
        lighthouse_utils, "get_lighthouse_command", lambda: ["lighthouse"]
    )
    monkeypatch.setattr(lighthouse_utils, "get_chrome_path", lambda: "/tmp/chrome")
    monkeypatch.setattr(lighthouse_utils.subprocess, "run", fake_run)

    with pytest.raises(pytest.fail.Exception, match="timed out after 300s"):
        lighthouse_utils.run_lighthouse(
            "http://localhost:3000", tmp_path / "lighthouse-report.json"
        )


@pytest.mark.parametrize(
    ("url", "expected"),
    [
        (
            "http://0.0.0.0:3001/dashboard?tab=perf",
            "http://127.0.0.1:3001/dashboard?tab=perf",
        ),
        (
            "http://[::]:3001/dashboard?tab=perf",
            "http://[::1]:3001/dashboard?tab=perf",
        ),
        (
            "http://localhost:3001/dashboard?tab=perf",
            "http://localhost:3001/dashboard?tab=perf",
        ),
    ],
)
def test_get_lighthouse_target_url(url: str, expected: str):
    """Convert bind-all addresses into loopback addresses for browser clients."""
    assert lighthouse_utils._get_lighthouse_target_url(url) == expected


# --- tests/units/utils/test_exec.py (new file; body continues past this chunk)
"""Unit tests for execution helpers."""

from __future__ import annotations

from types import SimpleNamespace

import pytest
from starlette.responses import FileResponse

from reflex.utils import exec as exec_utils
from reflex.utils.precompressed_staticfiles import PrecompressedStaticFiles


def _scope(
    path: str,
    accept_encoding: str | None = None,
    accept: str | None = None,
) -> dict:
    # Build a minimal ASGI HTTP scope; only the negotiation headers passed in
    # are attached, so tests control exactly what the server sees.
    headers = []
    if accept_encoding is not None:
        headers.append((b"accept-encoding", accept_encoding.encode()))
    if accept is not None:
        headers.append((b"accept", accept.encode()))
    return {
        "type": "http",
        "http_version": "1.1",
        "method": "GET",
        "scheme": "http",
        "path": path,
        "raw_path": path.encode(),
        "query_string": b"",
        "headers": headers,
        "client": ("127.0.0.1", 1234),
        "server": ("testserver", 80),
        "root_path": "",
    }


@pytest.mark.asyncio
async def test_get_frontend_mount_uses_precompressed_staticfiles(
    tmp_path,
    monkeypatch: pytest.MonkeyPatch,
):
    """The prod frontend mount should negotiate precompressed assets."""
    web_dir = tmp_path / ".web"
    frontend_dir = web_dir / "build" / "client" / "app"
    frontend_dir.mkdir(parents=True)
    (frontend_dir / "index.html").write_text("hello")
    (frontend_dir / "index.html.gz").write_bytes(b"compressed-index")

    # A SimpleNamespace stands in for the real Config so only the attributes
    # get_frontend_mount reads need to be provided.
    config = SimpleNamespace(
        frontend_path="app",
        frontend_compression_formats=["gzip"],
        frontend_image_formats=["webp"],
        prepend_frontend_path=lambda path: (
            "/app" + path if path.startswith("/") else path
        ),
    )
    monkeypatch.setattr(exec_utils, "get_config", lambda: config)
    monkeypatch.setattr(exec_utils, "get_web_dir", lambda: web_dir)

    mount = exec_utils.get_frontend_mount()

    assert mount.path == "/app"
    assert isinstance(mount.app, PrecompressedStaticFiles)
    assert tuple(fmt.name for fmt in mount.app._encodings) == ("gzip",)
    assert tuple(fmt.name for fmt in mount.app._image_formats) == ("webp",)

    response = await mount.app.get_response("", _scope("/", accept_encoding="gzip"))

    assert isinstance(response, FileResponse)
    assert response.status_code == 200
    assert str(response.path).endswith("index.html.gz")
    assert response.headers["content-encoding"] == "gzip"
    assert response.headers["vary"] == "Accept-Encoding"


# --- tests/units/utils/test_precompressed_staticfiles.py (new file) -----------
"""Unit tests for precompressed static file serving."""

from __future__ import annotations

from pathlib import Path

import pytest
from starlette.responses import FileResponse

from reflex.utils.precompressed_staticfiles import PrecompressedStaticFiles


def _scope(
    path: str,
    accept_encoding: str | None = None,
    accept: str | None = None,
) -> dict:
    # Minimal ASGI HTTP scope; mirrors the helper in test_exec.py.
    headers = []
    if accept_encoding is not None:
        headers.append((b"accept-encoding", accept_encoding.encode()))
    if accept is not None:
        headers.append((b"accept", accept.encode()))
    return {
        "type": "http",
        "http_version": "1.1",
        "method": "GET",
        "scheme": "http",
        "path": path,
        "raw_path": path.encode(),
        "query_string": b"",
        "headers": headers,
        "client": ("127.0.0.1", 1234),
        "server": ("testserver", 80),
        "root_path": "",
    }


@pytest.mark.asyncio
async def test_precompressed_static_files_supports_html_mode(tmp_path: Path):
    """Serve a precompressed index.html sidecar for directory requests."""
    (tmp_path / "index.html").write_text("hello")
    (tmp_path / "index.html.gz").write_bytes(b"compressed-index")

    static_files = PrecompressedStaticFiles(
        directory=tmp_path,
        html=True,
        encodings=["gzip"],
    )

    response = await static_files.get_response("", _scope("/", "gzip"))

    assert isinstance(response, FileResponse)
    assert response.status_code == 200
    assert str(response.path).endswith("index.html.gz")
    assert response.headers["content-encoding"] == "gzip"
    assert response.headers["vary"] == "Accept-Encoding"
    # The media type must describe the decoded content, not the sidecar.
    assert response.media_type == "text/html"


@pytest.mark.asyncio
async def test_precompressed_static_files_supports_html_404_fallback(tmp_path: Path):
    """Serve a precompressed 404.html sidecar for HTML fallback responses."""
    (tmp_path / "404.html").write_text("missing")
    (tmp_path / "404.html.gz").write_bytes(b"compressed-404")

    static_files = PrecompressedStaticFiles(
        directory=tmp_path,
        html=True,
        encodings=["gzip"],
    )

    response = await static_files.get_response("missing", _scope("/missing", "gzip"))

    assert isinstance(response, FileResponse)
    assert response.status_code == 404
    assert str(response.path).endswith("404.html.gz")
    assert response.headers["content-encoding"] == "gzip"
    assert response.media_type == "text/html"


@pytest.mark.asyncio
async def test_precompressed_static_files_prefers_best_accept_encoding(
    tmp_path: Path,
):
    """Prefer the highest-quality configured encoding that exists on disk."""
    (tmp_path / "app.js").write_text("console.log('hello');")
    (tmp_path / "app.js.gz").write_bytes(b"compressed-gzip")
    (tmp_path / "app.js.br").write_bytes(b"compressed-brotli")

    static_files = PrecompressedStaticFiles(
        directory=tmp_path,
        encodings=["gzip", "brotli"],
    )

    # br carries q=1 versus gzip's q=0.5, so brotli must win.
    response = await static_files.get_response(
        "app.js",
        _scope("/app.js", "gzip;q=0.5, br;q=1"),
    )

    assert isinstance(response, FileResponse)
    assert str(response.path).endswith("app.js.br")
    assert response.headers["content-encoding"] == "br"
    assert response.media_type is not None
    assert "javascript" in response.media_type


@pytest.mark.asyncio
async def test_precompressed_static_files_fall_back_to_identity(tmp_path: Path):
    """Keep serving the original file when no accepted sidecar is available."""
    (tmp_path / "app.js").write_text("console.log('hello');")
    (tmp_path / "app.js.gz").write_bytes(b"compressed-gzip")

    static_files = PrecompressedStaticFiles(
        directory=tmp_path,
        encodings=["gzip"],
    )

    response = await static_files.get_response(
        "app.js",
        _scope("/app.js", "identity"),
    )

    assert isinstance(response, FileResponse)
    assert str(response.path).endswith("app.js")
    assert "content-encoding" not in response.headers
    # Vary must still be set so caches key on Accept-Encoding.
    assert response.headers["vary"] == "Accept-Encoding"


@pytest.mark.asyncio
async def test_image_format_negotiation_serves_webp(tmp_path: Path):
    """Serve a WebP variant when the client accepts image/webp."""
    (tmp_path / "hero.png").write_bytes(b"png-data")
    (tmp_path / "hero.webp").write_bytes(b"webp-data")

    static_files = PrecompressedStaticFiles(
        directory=tmp_path,
        image_formats=["webp"],
    )

    response = await static_files.get_response(
        "hero.png",
        _scope("/hero.png", accept="image/webp, image/png, */*"),
    )

    assert isinstance(response, FileResponse)
    assert str(response.path).endswith("hero.webp")
    assert response.media_type == "image/webp"
    assert "Accept" in response.headers["vary"]


@pytest.mark.asyncio
async def test_image_format_negotiation_serves_avif(tmp_path: Path):
    """Serve an AVIF variant when the client accepts image/avif."""
    (tmp_path / "photo.jpg").write_bytes(b"jpeg-data")
    (tmp_path / "photo.avif").write_bytes(b"avif-data")

    static_files = PrecompressedStaticFiles(
        directory=tmp_path,
        image_formats=["avif"],
    )

    response = await static_files.get_response(
        "photo.jpg",
        _scope("/photo.jpg", accept="image/avif, image/jpeg"),
    )

    assert isinstance(response, FileResponse)
    assert str(response.path).endswith("photo.avif")
    assert response.media_type == "image/avif"


@pytest.mark.asyncio
async def test_image_format_negotiation_prefers_best_quality(tmp_path: Path):
    """Prefer the highest-quality accepted image format."""
    (tmp_path / "hero.png").write_bytes(b"png-data")
    (tmp_path / "hero.webp").write_bytes(b"webp-data")
    (tmp_path / "hero.avif").write_bytes(b"avif-data")

    static_files = PrecompressedStaticFiles(
        directory=tmp_path,
        image_formats=["webp", "avif"],
    )

    # avif carries q=1 versus webp's q=0.5, so avif must win.
    response = await static_files.get_response(
        "hero.png",
        _scope("/hero.png", accept="image/webp;q=0.5, image/avif;q=1"),
    )

    assert isinstance(response, FileResponse)
    assert str(response.path).endswith("hero.avif")
    assert response.media_type == "image/avif"


@pytest.mark.asyncio
async def test_image_format_negotiation_falls_back_to_original(tmp_path: Path):
    """Serve the original image when no accepted format variant exists."""
    (tmp_path / "hero.png").write_bytes(b"png-data")

    static_files = PrecompressedStaticFiles(
        directory=tmp_path,
        image_formats=["webp", "avif"],
    )

    response = await static_files.get_response(
        "hero.png",
        _scope("/hero.png", accept="image/png"),
    )

    assert isinstance(response, FileResponse)
    assert str(response.path).endswith("hero.png")
    assert "Accept" in response.headers["vary"]


@pytest.mark.asyncio
async def test_image_format_negotiation_ignores_non_image_files(tmp_path: Path):
    """Non-image files are not affected by image format negotiation."""
    (tmp_path / "app.js").write_text("console.log('hello');")

    static_files = PrecompressedStaticFiles(
        directory=tmp_path,
        image_formats=["webp"],
    )

    response = await static_files.get_response(
        "app.js",
        _scope("/app.js", accept="image/webp, */*"),
    )

    assert isinstance(response, FileResponse)
    assert str(response.path).endswith("app.js")


@pytest.mark.asyncio
async def test_image_and_encoding_negotiation_combined(tmp_path: Path):
    """Both image format and encoding negotiation work together."""
    (tmp_path / "hero.png").write_bytes(b"png-data")
    (tmp_path / "hero.webp").write_bytes(b"webp-data")
    (tmp_path / "hero.webp.gz").write_bytes(b"webp-gzip")

    static_files = PrecompressedStaticFiles(
        directory=tmp_path,
        encodings=["gzip"],
        image_formats=["webp"],
    )

    response = await static_files.get_response(
        "hero.png",
        _scope(
            "/hero.png",
            accept_encoding="gzip",
            accept="image/webp, image/png",
        ),
    )

    assert isinstance(response, FileResponse)
    # Image format negotiation serves webp, but encoding negotiation
    # does not apply since the path changed and the compressed sidecar
    # is for the original path.
    assert str(response.path).endswith("hero.webp")
    assert response.media_type == "image/webp"
    assert "Accept" in response.headers["vary"]