- This is a sample snippet. You can use this snippet to create your own
- snippets. You'll also see a javascript and scss file linked to this snippet.
-
-
diff --git a/config/snippet.js b/config/snippet.js
deleted file mode 100644
index 6289e916..00000000
--- a/config/snippet.js
+++ /dev/null
@@ -1,5 +0,0 @@
-import $ from 'jquery';
-$(() => {
- // Your code here
- console.log('Snippet loaded');
-});
diff --git a/config/snippet.scss b/config/snippet.scss
deleted file mode 100644
index db1ad41f..00000000
--- a/config/snippet.scss
+++ /dev/null
@@ -1,4 +0,0 @@
-@import '../sass/main';
-h1 {
- color: red;
-}
diff --git a/dist/app.d.ts b/dist/app.d.ts
index 6dfc328b..99750136 100644
--- a/dist/app.d.ts
+++ b/dist/app.d.ts
@@ -6,12 +6,16 @@ import Handoff from '.';
*/
declare const buildApp: (handoff: Handoff) => Promise<void>;
/**
- * Watch the next js application
+ * Watch the Next.js application.
+ * Starts a custom dev server with Handoff-specific watchers and hot-reloading.
+ *
* @param handoff
*/
export declare const watchApp: (handoff: Handoff) => Promise<void>;
/**
- * Watch the next js application
+ * Watch the Next.js application using the standard Next.js dev server.
+ * This is useful for debugging the Next.js app itself without the Handoff overlay.
+ *
* @param handoff
*/
export declare const devApp: (handoff: Handoff) => Promise<void>;
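
The two exports above give separate entry points for development: watchApp runs the custom Handoff dev server, devApp hands control to Next.js directly. A minimal usage sketch; the Handoff constructor call and the 'handoff' / 'handoff/app' import paths are illustrative assumptions, not taken from this changeset:

    import Handoff from 'handoff';
    import { devApp, watchApp } from 'handoff/app';

    async function startDev(debugNext = false): Promise<void> {
      const handoff = new Handoff(); // construction details are project-specific
      if (debugNext) {
        // Plain Next.js dev server, useful for debugging the app itself.
        await devApp(handoff);
      } else {
        // Custom dev server with Handoff-specific watchers and hot-reloading.
        await watchApp(handoff);
      }
    }
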
diff --git a/dist/app.js b/dist/app.js
index 360655d3..e866f844 100644
--- a/dist/app.js
+++ b/dist/app.js
@@ -36,7 +36,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.devApp = exports.watchApp = void 0;
-const chalk_1 = __importDefault(require("chalk"));
const chokidar_1 = __importDefault(require("chokidar"));
const cross_spawn_1 = __importDefault(require("cross-spawn"));
const fs_extra_1 = __importDefault(require("fs-extra"));
@@ -48,6 +47,7 @@ const ws_1 = require("ws");
const config_1 = require("./config");
const pipeline_1 = require("./pipeline");
const builder_1 = __importStar(require("./transformers/preview/component/builder"));
+const logger_1 = require("./utils/logger");
/**
* Creates a WebSocket server that broadcasts messages to connected clients.
* Designed for development mode to help with hot-reloading.
@@ -66,7 +66,7 @@ const createWebSocketServer = (...args_1) => __awaiter(void 0, [...args_1], void
const extWs = ws;
extWs.isAlive = true;
extWs.send(JSON.stringify({ type: 'WELCOME' }));
- extWs.on('error', (error) => console.error('WebSocket error:', error));
+ extWs.on('error', (error) => logger_1.Logger.error('WebSocket error:', error));
extWs.on('pong', heartbeat);
});
// Periodically ping clients to ensure they are still connected
@@ -74,7 +74,7 @@ const createWebSocketServer = (...args_1) => __awaiter(void 0, [...args_1], void
wss.clients.forEach((client) => {
const extWs = client;
if (!extWs.isAlive) {
- console.log(chalk_1.default.yellow('Terminating inactive client'));
+ logger_1.Logger.warn('Terminating inactive client');
return client.terminate();
}
extWs.isAlive = false;
@@ -85,10 +85,10 @@ const createWebSocketServer = (...args_1) => __awaiter(void 0, [...args_1], void
wss.on('close', () => {
clearInterval(pingInterval);
});
- console.log(chalk_1.default.green(`WebSocket server started on ws://localhost:${port}`));
+ logger_1.Logger.success(`WebSocket server listening on ws://localhost:${port}`);
// Return a function to broadcast a message to all connected clients
return (message) => {
- console.log(chalk_1.default.green(`Broadcasting message to ${wss.clients.size} client(s)`));
+ logger_1.Logger.success(`Broadcasting message to ${wss.clients.size} client(s)`);
wss.clients.forEach((client) => {
if (client.readyState === ws_1.WebSocket.OPEN) {
client.send(message);
@@ -124,11 +124,11 @@ const getAppPath = (handoff) => {
* Copy the public dir from the working dir to the module dir
* @param handoff
*/
-const mergePublicDir = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
+const syncPublicFiles = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
const appPath = getAppPath(handoff);
const workingPublicPath = getWorkingPublicPath(handoff);
if (workingPublicPath) {
- fs_extra_1.default.copySync(workingPublicPath, path_1.default.resolve(appPath, 'public'), { overwrite: true });
+ yield fs_extra_1.default.copy(workingPublicPath, path_1.default.resolve(appPath, 'public'), { overwrite: true });
}
});
/**
@@ -138,53 +138,64 @@ const mergePublicDir = (handoff) => __awaiter(void 0, void 0, void 0, function*
* @param handoff - The Handoff instance containing configuration and working paths
* @returns Promise that resolves when cleanup is complete
*/
-const performCleanup = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
+const cleanupAppDirectory = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
const appPath = getAppPath(handoff);
// Clean project app dir
if (fs_extra_1.default.existsSync(appPath)) {
- yield fs_extra_1.default.rm(appPath, { recursive: true });
+ yield fs_extra_1.default.remove(appPath);
}
});
-const publishTokensApi = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
+/**
+ * Publishes the tokens API files to the public directory.
+ *
+ * @param handoff - The Handoff instance
+ */
+const generateTokensApi = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
const apiPath = path_1.default.resolve(path_1.default.join(handoff.workingPath, 'public/api'));
- if (!fs_extra_1.default.existsSync(apiPath)) {
- fs_extra_1.default.mkdirSync(apiPath, { recursive: true });
- }
+ yield fs_extra_1.default.ensureDir(apiPath);
const tokens = yield handoff.getDocumentationObject();
// Early return if no tokens
if (!tokens) {
// Write empty tokens.json for API consistency
- fs_extra_1.default.writeFileSync(path_1.default.join(apiPath, 'tokens.json'), JSON.stringify({}, null, 2));
+ yield fs_extra_1.default.writeJson(path_1.default.join(apiPath, 'tokens.json'), {}, { spaces: 2 });
return;
}
- fs_extra_1.default.writeFileSync(path_1.default.join(apiPath, 'tokens.json'), JSON.stringify(tokens, null, 2));
- if (!fs_extra_1.default.existsSync(path_1.default.join(apiPath, 'tokens'))) {
- fs_extra_1.default.mkdirSync(path_1.default.join(apiPath, 'tokens'), { recursive: true });
- }
+ yield fs_extra_1.default.writeJson(path_1.default.join(apiPath, 'tokens.json'), tokens, { spaces: 2 });
+ const tokensDir = path_1.default.join(apiPath, 'tokens');
+ yield fs_extra_1.default.ensureDir(tokensDir);
// Only iterate if tokens has properties
if (tokens && typeof tokens === 'object') {
+ const promises = [];
for (const type in tokens) {
if (type === 'timestamp' || !tokens[type] || typeof tokens[type] !== 'object')
continue;
for (const group in tokens[type]) {
if (tokens[type][group]) {
- fs_extra_1.default.writeFileSync(path_1.default.join(apiPath, 'tokens', `${group}.json`), JSON.stringify(tokens[type][group], null, 2));
+ promises.push(fs_extra_1.default.writeJson(path_1.default.join(tokensDir, `${group}.json`), tokens[type][group], { spaces: 2 }));
}
}
}
+ yield Promise.all(promises);
}
});
-const prepareProjectApp = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
+/**
+ * Prepares the project application by copying source files and configuring Next.js.
+ *
+ * @param handoff - The Handoff instance
+ * @returns The path to the prepared application directory
+ */
+const initializeProjectApp = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
var _a, _b, _c, _d;
const srcPath = path_1.default.resolve(handoff.modulePath, 'src', 'app');
const appPath = getAppPath(handoff);
// Publish tokens API
- publishTokensApi(handoff);
+ yield generateTokensApi(handoff);
// Prepare project app dir
- yield fs_extra_1.default.promises.mkdir(appPath, { recursive: true });
+ yield fs_extra_1.default.ensureDir(appPath);
yield fs_extra_1.default.copy(srcPath, appPath, { overwrite: true });
- yield mergePublicDir(handoff);
+ yield syncPublicFiles(handoff);
// Prepare project app configuration
+ // Warning: Regex replacement is fragile and depends on exact formatting in next.config.mjs
const handoffProjectId = handoff.getProjectId();
const handoffAppBasePath = (_a = handoff.config.app.base_path) !== null && _a !== void 0 ? _a : '';
const handoffWorkingPath = path_1.default.resolve(handoff.workingPath);
@@ -205,14 +216,260 @@ const prepareProjectApp = (handoff) => __awaiter(void 0, void 0, void 0, functio
yield fs_extra_1.default.writeFile(nextConfigPath, nextConfigContent);
return appPath;
});
-const persistRuntimeCache = (handoff) => {
+/**
+ * Persists the client config to a JSON file.
+ *
+ * @param handoff - The Handoff instance
+ */
+const persistClientConfig = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
const appPath = getAppPath(handoff);
- const destination = path_1.default.resolve(appPath, 'runtime.cache.json');
+ const destination = path_1.default.resolve(appPath, 'client.config.json');
// Ensure directory exists
- if (!fs_extra_1.default.existsSync(appPath)) {
- fs_extra_1.default.mkdirSync(appPath, { recursive: true });
+ yield fs_extra_1.default.ensureDir(appPath);
+ yield fs_extra_1.default.writeJson(destination, { config: (0, config_1.getClientConfig)(handoff) }, { spaces: 2 });
+});
+/**
+ * Watches the working public directory for changes and updates the app.
+ *
+ * @param handoff - The Handoff instance
+ * @param wss - The WebSocket broadcaster
+ * @param state - The shared watcher state
+ * @param chokidarConfig - Configuration for chokidar
+ */
+const watchPublicDirectory = (handoff, wss, state, chokidarConfig) => {
+ if (fs_extra_1.default.existsSync(path_1.default.resolve(handoff.workingPath, 'public'))) {
+ chokidar_1.default.watch(path_1.default.resolve(handoff.workingPath, 'public'), chokidarConfig).on('all', (event, path) => __awaiter(void 0, void 0, void 0, function* () {
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ if (!state.debounce) {
+ state.debounce = true;
+ try {
+ logger_1.Logger.warn('Public directory changed. Handoff will ingest the new data...');
+ yield syncPublicFiles(handoff);
+ wss(JSON.stringify({ type: 'reload' }));
+ }
+ catch (e) {
+ logger_1.Logger.error('Error syncing public directory:', e);
+ }
+ finally {
+ state.debounce = false;
+ }
+ }
+ break;
+ }
+ }));
+ }
+};
+/**
+ * Watches the application source code for changes.
+ *
+ * @param handoff - The Handoff instance
+ */
+const watchAppSource = (handoff) => {
+ chokidar_1.default
+ .watch(path_1.default.resolve(handoff.modulePath, 'src', 'app'), {
+ ignored: /(^|[\/\\])\../, // ignore dotfiles
+ persistent: true,
+ ignoreInitial: true,
+ })
+ .on('all', (event, path) => __awaiter(void 0, void 0, void 0, function* () {
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ try {
+ yield initializeProjectApp(handoff);
+ }
+ catch (e) {
+ logger_1.Logger.error('Error initializing project app:', e);
+ }
+ break;
+ }
+ }));
+};
+/**
+ * Watches the user's pages directory for changes.
+ *
+ * @param handoff - The Handoff instance
+ * @param chokidarConfig - Configuration for chokidar
+ */
+const watchPages = (handoff, chokidarConfig) => {
+ if (fs_extra_1.default.existsSync(path_1.default.resolve(handoff.workingPath, 'pages'))) {
+ chokidar_1.default.watch(path_1.default.resolve(handoff.workingPath, 'pages'), chokidarConfig).on('all', (event, path) => __awaiter(void 0, void 0, void 0, function* () {
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ try {
+ logger_1.Logger.warn(`Doc page ${event} detected. Please reload the browser to see changes...`);
+ logger_1.Logger.debug(`Path: ${path}`);
+ }
+ catch (e) {
+ logger_1.Logger.error('Error watching pages:', e);
+ }
+ break;
+ }
+ }));
+ }
+};
+/**
+ * Watches the SCSS entry point for changes.
+ *
+ * @param handoff - The Handoff instance
+ * @param state - The shared watcher state
+ * @param chokidarConfig - Configuration for chokidar
+ */
+const watchScss = (handoff, state, chokidarConfig) => __awaiter(void 0, void 0, void 0, function* () {
+ var _a, _b, _c, _d;
+ if (((_b = (_a = handoff.runtimeConfig) === null || _a === void 0 ? void 0 : _a.entries) === null || _b === void 0 ? void 0 : _b.scss) && fs_extra_1.default.existsSync((_d = (_c = handoff.runtimeConfig) === null || _c === void 0 ? void 0 : _c.entries) === null || _d === void 0 ? void 0 : _d.scss)) {
+ const stat = yield fs_extra_1.default.stat(handoff.runtimeConfig.entries.scss);
+ chokidar_1.default
+ .watch(stat.isDirectory() ? handoff.runtimeConfig.entries.scss : path_1.default.dirname(handoff.runtimeConfig.entries.scss), chokidarConfig)
+ .on('all', (event, file) => __awaiter(void 0, void 0, void 0, function* () {
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ if (!state.debounce) {
+ state.debounce = true;
+ try {
+ yield handoff.getSharedStyles();
+ }
+ catch (e) {
+ logger_1.Logger.error('Error processing shared styles:', e);
+ }
+ finally {
+ state.debounce = false;
+ }
+ }
+ }
+ }));
+ }
+});
+/**
+ * Maps configuration entry types to component segments.
+ */
+const mapEntryTypeToSegment = (type) => {
+ return {
+ js: builder_1.ComponentSegment.JavaScript,
+ scss: builder_1.ComponentSegment.Style,
+ template: builder_1.ComponentSegment.Previews,
+ templates: builder_1.ComponentSegment.Previews,
+ }[type];
+};
+/**
+ * Gets the paths of runtime components to watch.
+ *
+ * @param handoff - The Handoff instance
+ * @returns A Map of paths to watch and their entry types
+ */
+const getRuntimeComponentsPathsToWatch = (handoff) => {
+ var _a, _b, _c;
+ const result = new Map();
+ for (const runtimeComponentId of Object.keys((_b = (_a = handoff.runtimeConfig) === null || _a === void 0 ? void 0 : _a.entries.components) !== null && _b !== void 0 ? _b : {})) {
+ for (const runtimeComponentVersion of Object.keys(handoff.runtimeConfig.entries.components[runtimeComponentId])) {
+ const runtimeComponent = handoff.runtimeConfig.entries.components[runtimeComponentId][runtimeComponentVersion];
+ for (const [runtimeComponentEntryType, runtimeComponentEntryPath] of Object.entries((_c = runtimeComponent.entries) !== null && _c !== void 0 ? _c : {})) {
+ const normalizedComponentEntryPath = runtimeComponentEntryPath;
+ if (fs_extra_1.default.existsSync(normalizedComponentEntryPath)) {
+ const entryType = runtimeComponentEntryType;
+ if (fs_extra_1.default.statSync(normalizedComponentEntryPath).isFile()) {
+ result.set(path_1.default.resolve(normalizedComponentEntryPath), entryType);
+ }
+ else {
+ result.set(normalizedComponentEntryPath, entryType);
+ }
+ }
+ }
+ }
+ }
+ return result;
+};
+/**
+ * Watches runtime components for changes.
+ *
+ * @param handoff - The Handoff instance
+ * @param state - The shared watcher state
+ * @param runtimeComponentPathsToWatch - Map of paths to watch
+ */
+const watchRuntimeComponents = (handoff, state, runtimeComponentPathsToWatch) => {
+ if (state.runtimeComponentsWatcher) {
+ state.runtimeComponentsWatcher.close();
+ }
+ if (runtimeComponentPathsToWatch.size > 0) {
+ const pathsToWatch = Array.from(runtimeComponentPathsToWatch.keys());
+ state.runtimeComponentsWatcher = chokidar_1.default.watch(pathsToWatch, { ignoreInitial: true });
+ state.runtimeComponentsWatcher.on('all', (event, file) => __awaiter(void 0, void 0, void 0, function* () {
+ if (handoff.getConfigFilePaths().includes(file)) {
+ return;
+ }
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ if (!state.debounce) {
+ state.debounce = true;
+ try {
+ const entryType = runtimeComponentPathsToWatch.get(file);
+ const segmentToUpdate = entryType ? mapEntryTypeToSegment(entryType) : undefined;
+ const componentDir = path_1.default.basename(path_1.default.dirname(path_1.default.dirname(file)));
+ yield (0, builder_1.default)(handoff, componentDir, segmentToUpdate);
+ }
+ catch (e) {
+ logger_1.Logger.error('Error processing component:', e);
+ }
+ finally {
+ state.debounce = false;
+ }
+ }
+ break;
+ }
+ }));
+ }
+};
+/**
+ * Watches the runtime configuration for changes.
+ *
+ * @param handoff - The Handoff instance
+ * @param state - The shared watcher state
+ */
+const watchRuntimeConfiguration = (handoff, state) => {
+ if (state.runtimeConfigurationWatcher) {
+ state.runtimeConfigurationWatcher.close();
+ }
+ if (handoff.getConfigFilePaths().length > 0) {
+ state.runtimeConfigurationWatcher = chokidar_1.default.watch(handoff.getConfigFilePaths(), { ignoreInitial: true });
+ state.runtimeConfigurationWatcher.on('all', (event, file) => __awaiter(void 0, void 0, void 0, function* () {
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ if (!state.debounce) {
+ state.debounce = true;
+ try {
+ file = path_1.default.dirname(path_1.default.dirname(file));
+ // Reload the Handoff instance to pick up configuration changes
+ handoff.reload();
+ // After reloading, persist the updated client configuration
+ yield persistClientConfig(handoff);
+ // Restart the runtime components watcher to track potentially updated/added/removed components
+ watchRuntimeComponents(handoff, state, getRuntimeComponentsPathsToWatch(handoff));
+ // Process components based on the updated configuration and file path
+ yield (0, builder_1.default)(handoff, path_1.default.basename(file));
+ }
+ catch (e) {
+ logger_1.Logger.error('Error reloading runtime configuration:', e);
+ }
+ finally {
+ state.debounce = false;
+ }
+ }
+ break;
+ }
+ }));
}
- fs_extra_1.default.writeFileSync(destination, JSON.stringify(Object.assign({ config: (0, config_1.getClientConfig)(handoff) }, handoff.runtimeConfig), null, 2), 'utf-8');
};
/**
* Build the next js application
@@ -221,12 +478,12 @@ const persistRuntimeCache = (handoff) => {
*/
const buildApp = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
// Perform cleanup
- yield performCleanup(handoff);
+ yield cleanupAppDirectory(handoff);
// Build components
yield (0, pipeline_1.buildComponents)(handoff);
// Prepare app
- const appPath = yield prepareProjectApp(handoff);
- persistRuntimeCache(handoff);
+ const appPath = yield initializeProjectApp(handoff);
+ yield persistClientConfig(handoff);
// Build app
const buildResult = cross_spawn_1.default.sync('npx', ['next', 'build'], {
cwd: appPath,
@@ -242,42 +499,31 @@ const buildApp = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
}
// Ensure output root directory exists
const outputRoot = path_1.default.resolve(handoff.workingPath, handoff.sitesDirectory);
- if (!fs_extra_1.default.existsSync(outputRoot)) {
- fs_extra_1.default.mkdirSync(outputRoot, { recursive: true });
- }
+ yield fs_extra_1.default.ensureDir(outputRoot);
// Clean the project output directory (if exists)
const output = path_1.default.resolve(outputRoot, handoff.getProjectId());
if (fs_extra_1.default.existsSync(output)) {
- fs_extra_1.default.removeSync(output);
+ yield fs_extra_1.default.remove(output);
}
// Copy the build files into the project output directory
- fs_extra_1.default.copySync(path_1.default.resolve(appPath, 'out'), output);
+ yield fs_extra_1.default.copy(path_1.default.resolve(appPath, 'out'), output);
});
/**
- * Watch the next js application
+ * Watch the Next.js application.
+ * Starts a custom dev server with Handoff-specific watchers and hot-reloading.
+ *
* @param handoff
*/
const watchApp = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
- var _a, _b, _c, _d, _e, _f, _g, _h;
- // Initial processing of the components
- yield (0, builder_1.default)(handoff);
- const appPath = yield prepareProjectApp(handoff);
- // Include any changes made within the app source during watch
- chokidar_1.default
- .watch(path_1.default.resolve(handoff.modulePath, 'src', 'app'), {
- ignored: /(^|[\/\\])\../, // ignore dotfiles
- persistent: true,
- ignoreInitial: true,
- })
- .on('all', (event, path) => __awaiter(void 0, void 0, void 0, function* () {
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- yield prepareProjectApp(handoff);
- break;
- }
- }));
+ var _a, _b, _c, _d;
+ // Initial processing of the components with caching enabled
+ // This will skip rebuilding components whose source files haven't changed
+ yield (0, builder_1.default)(handoff, undefined, undefined, { useCache: true });
+ const appPath = yield initializeProjectApp(handoff);
+ // Persist client configuration
+ yield persistClientConfig(handoff);
+ // Watch app source
+ watchAppSource(handoff);
// // does a ts config exist?
// let tsconfigPath = 'tsconfig.json';
// config.typescript = {
@@ -299,7 +545,7 @@ const watchApp = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
// purge out cache
const moduleOutput = path_1.default.resolve(appPath, 'out');
if (fs_extra_1.default.existsSync(moduleOutput)) {
- fs_extra_1.default.removeSync(moduleOutput);
+ yield fs_extra_1.default.remove(moduleOutput);
}
app.prepare().then(() => {
(0, http_1.createServer)((req, res) => __awaiter(void 0, void 0, void 0, function* () {
@@ -313,17 +559,17 @@ const watchApp = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
yield handle(req, res, parsedUrl);
}
catch (err) {
- console.error('Error occurred handling', req.url, err);
+ logger_1.Logger.error(`Error occurred handling ${req.url}`, err);
res.statusCode = 500;
res.end('internal server error');
}
}))
.once('error', (err) => {
- console.error(err);
+ logger_1.Logger.error(err);
process.exit(1);
})
.listen(port, () => {
- console.log(`> Ready on http://${hostname}:${port}`);
+ logger_1.Logger.log(`Ready on http://${hostname}:${port}`);
});
});
const wss = yield createWebSocketServer((_d = (_c = handoff.config.app.ports) === null || _c === void 0 ? void 0 : _c.websocket) !== null && _d !== void 0 ? _d : 3001);
@@ -332,155 +578,35 @@ const watchApp = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
persistent: true,
ignoreInitial: true,
};
- let debounce = false;
- if (fs_extra_1.default.existsSync(path_1.default.resolve(handoff.workingPath, 'public'))) {
- chokidar_1.default.watch(path_1.default.resolve(handoff.workingPath, 'public'), chokidarConfig).on('all', (event, path) => __awaiter(void 0, void 0, void 0, function* () {
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- if (!debounce) {
- debounce = true;
- console.log(chalk_1.default.yellow('Public directory changed. Handoff will ingest the new data...'));
- yield mergePublicDir(handoff);
- wss(JSON.stringify({ type: 'reload' }));
- debounce = false;
- }
- break;
- }
- }));
- }
- let runtimeComponentsWatcher = null;
- let runtimeConfigurationWatcher = null;
- const entryTypeToSegment = (type) => {
- return {
- js: builder_1.ComponentSegment.JavaScript,
- scss: builder_1.ComponentSegment.Style,
- template: builder_1.ComponentSegment.Previews,
- templates: builder_1.ComponentSegment.Previews,
- }[type];
+ const state = {
+ debounce: false,
+ runtimeComponentsWatcher: null,
+ runtimeConfigurationWatcher: null,
};
- const watchRuntimeComponents = (runtimeComponentPathsToWatch) => {
- persistRuntimeCache(handoff);
- if (runtimeComponentsWatcher) {
- runtimeComponentsWatcher.close();
- }
- if (runtimeComponentPathsToWatch.size > 0) {
- const pathsToWatch = Array.from(runtimeComponentPathsToWatch.keys());
- runtimeComponentsWatcher = chokidar_1.default.watch(pathsToWatch, { ignoreInitial: true });
- runtimeComponentsWatcher.on('all', (event, file) => __awaiter(void 0, void 0, void 0, function* () {
- if (handoff.getConfigFilePaths().includes(file)) {
- return;
- }
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- if (!debounce) {
- debounce = true;
- const entryType = runtimeComponentPathsToWatch.get(file);
- const segmentToUpdate = entryType ? entryTypeToSegment(entryType) : undefined;
- const componentDir = path_1.default.basename(path_1.default.dirname(path_1.default.dirname(file)));
- yield (0, builder_1.default)(handoff, componentDir, segmentToUpdate);
- debounce = false;
- }
- break;
- }
- }));
- }
- };
- const watchRuntimeConfiguration = () => {
- if (runtimeConfigurationWatcher) {
- runtimeConfigurationWatcher.close();
- }
- if (handoff.getConfigFilePaths().length > 0) {
- runtimeConfigurationWatcher = chokidar_1.default.watch(handoff.getConfigFilePaths(), { ignoreInitial: true });
- runtimeConfigurationWatcher.on('all', (event, file) => __awaiter(void 0, void 0, void 0, function* () {
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- if (!debounce) {
- debounce = true;
- file = path_1.default.dirname(path_1.default.dirname(file));
- handoff.reload();
- watchRuntimeComponents(getRuntimeComponentsPathsToWatch());
- yield (0, builder_1.default)(handoff, path_1.default.basename(file));
- debounce = false;
- }
- break;
- }
- }));
- }
- };
- const getRuntimeComponentsPathsToWatch = () => {
- var _a, _b, _c;
- const result = new Map();
- for (const runtimeComponentId of Object.keys((_b = (_a = handoff.runtimeConfig) === null || _a === void 0 ? void 0 : _a.entries.components) !== null && _b !== void 0 ? _b : {})) {
- for (const runtimeComponentVersion of Object.keys(handoff.runtimeConfig.entries.components[runtimeComponentId])) {
- const runtimeComponent = handoff.runtimeConfig.entries.components[runtimeComponentId][runtimeComponentVersion];
- for (const [runtimeComponentEntryType, runtimeComponentEntryPath] of Object.entries((_c = runtimeComponent.entries) !== null && _c !== void 0 ? _c : {})) {
- const normalizedComponentEntryPath = runtimeComponentEntryPath;
- if (fs_extra_1.default.existsSync(normalizedComponentEntryPath)) {
- const entryType = runtimeComponentEntryType;
- if (fs_extra_1.default.statSync(normalizedComponentEntryPath).isFile()) {
- result.set(path_1.default.resolve(normalizedComponentEntryPath), entryType);
- }
- else {
- result.set(normalizedComponentEntryPath, entryType);
- }
- }
- }
- }
- }
- return result;
- };
- watchRuntimeComponents(getRuntimeComponentsPathsToWatch());
- watchRuntimeConfiguration();
- if (((_f = (_e = handoff.runtimeConfig) === null || _e === void 0 ? void 0 : _e.entries) === null || _f === void 0 ? void 0 : _f.scss) && fs_extra_1.default.existsSync((_h = (_g = handoff.runtimeConfig) === null || _g === void 0 ? void 0 : _g.entries) === null || _h === void 0 ? void 0 : _h.scss)) {
- const stat = yield fs_extra_1.default.stat(handoff.runtimeConfig.entries.scss);
- chokidar_1.default
- .watch(stat.isDirectory() ? handoff.runtimeConfig.entries.scss : path_1.default.dirname(handoff.runtimeConfig.entries.scss), chokidarConfig)
- .on('all', (event, file) => __awaiter(void 0, void 0, void 0, function* () {
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- if (!debounce) {
- debounce = true;
- yield handoff.getSharedStyles();
- debounce = false;
- }
- }
- }));
- }
- if (fs_extra_1.default.existsSync(path_1.default.resolve(handoff.workingPath, 'pages'))) {
- chokidar_1.default.watch(path_1.default.resolve(handoff.workingPath, 'pages'), chokidarConfig).on('all', (event, path) => __awaiter(void 0, void 0, void 0, function* () {
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- console.log(chalk_1.default.yellow(`Doc page ${event}ed. Please reload browser to see changes...`), path);
- break;
- }
- }));
- }
+ watchPublicDirectory(handoff, wss, state, chokidarConfig);
+ watchRuntimeComponents(handoff, state, getRuntimeComponentsPathsToWatch(handoff));
+ watchRuntimeConfiguration(handoff, state);
+ yield watchScss(handoff, state, chokidarConfig);
+ watchPages(handoff, chokidarConfig);
});
exports.watchApp = watchApp;
/**
- * Watch the next js application
+ * Watch the Next.js application using the standard Next.js dev server.
+ * This is useful for debugging the Next.js app itself without the Handoff overlay.
+ *
* @param handoff
*/
const devApp = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
var _a, _b;
// Prepare app
- const appPath = yield prepareProjectApp(handoff);
+ const appPath = yield initializeProjectApp(handoff);
// Purge app cache
const moduleOutput = path_1.default.resolve(appPath, 'out');
if (fs_extra_1.default.existsSync(moduleOutput)) {
- fs_extra_1.default.removeSync(moduleOutput);
+ yield fs_extra_1.default.remove(moduleOutput);
}
- persistRuntimeCache(handoff);
+ // Persist client configuration
+ yield persistClientConfig(handoff);
// Run
const devResult = cross_spawn_1.default.sync('npx', ['next', 'dev', '--port', String((_b = (_a = handoff.config.app.ports) === null || _a === void 0 ? void 0 : _a.app) !== null && _b !== void 0 ? _b : 3000)], {
cwd: appPath,
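
The createWebSocketServer helper above greets each client with { type: 'WELCOME' }, pings connections to drop stale ones, and returns a broadcaster that the public-directory watcher uses to send { type: 'reload' }. A browser-side counterpart might look like the sketch below; the port matches the 3001 default used in watchApp, and the reconnect delay is an arbitrary assumption:

    const socket = new WebSocket('ws://localhost:3001');

    socket.addEventListener('message', (event: MessageEvent) => {
      const message = JSON.parse(String(event.data)) as { type: string };
      if (message.type === 'reload') {
        // Handoff ingested new public files; refresh the preview.
        window.location.reload();
      }
    });

    socket.addEventListener('close', () => {
      // The server terminates unresponsive clients, so recover with a simple reload.
      setTimeout(() => window.location.reload(), 1000);
    });
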
diff --git a/dist/cache/build-cache.d.ts b/dist/cache/build-cache.d.ts
new file mode 100644
index 00000000..bcc7829a
--- /dev/null
+++ b/dist/cache/build-cache.d.ts
@@ -0,0 +1,99 @@
+import Handoff from '../index';
+import { FileState } from './file-state';
+/**
+ * Cache entry for a single component version
+ */
+export interface ComponentCacheEntry {
+ /** File states for all source files of this component */
+ files: Record<string, FileState>;
+ /** States for template directory files (if templates is a directory) */
+ templateDirFiles?: Record<string, FileState>;
+ /** Timestamp when this component was last built */
+ buildTimestamp: number;
+}
+/**
+ * State of global dependencies that affect all components
+ */
+export interface GlobalDepsState {
+ /** tokens.json file state */
+ tokens?: FileState;
+ /** shared.scss or shared.css file state */
+ sharedStyles?: FileState;
+ /** Global SCSS entry file state */
+ globalScss?: FileState;
+ /** Global JS entry file state */
+ globalJs?: FileState;
+}
+/**
+ * Complete build cache structure
+ */
+export interface BuildCache {
+ /** Cache format version for invalidation on structure changes */
+ version: string;
+ /** State of global dependencies at last build */
+ globalDeps: GlobalDepsState;
+ /** Per-component cache entries: componentId -> version -> entry */
+ components: Record<string, Record<string, ComponentCacheEntry>>;
+}
+/**
+ * Gets the path to the build cache file
+ */
+export declare function getCachePath(handoff: Handoff): string;
+/**
+ * Loads the build cache from disk
+ * @returns The cached data or null if cache doesn't exist or is invalid
+ */
+export declare function loadBuildCache(handoff: Handoff): Promise<BuildCache | null>;
+/**
+ * Saves the build cache to disk
+ * Uses atomic write (temp file + rename) to prevent corruption
+ */
+export declare function saveBuildCache(handoff: Handoff, cache: BuildCache): Promise<void>;
+/**
+ * Computes the current state of global dependencies
+ */
+export declare function computeGlobalDepsState(handoff: Handoff): Promise<GlobalDepsState>;
+/**
+ * Checks if global dependencies have changed
+ */
+export declare function haveGlobalDepsChanged(cached: GlobalDepsState | null | undefined, current: GlobalDepsState): boolean;
+/**
+ * Gets all file paths that should be tracked for a component
+ */
+export declare function getComponentFilePaths(handoff: Handoff, componentId: string, version: string): {
+ files: string[];
+ templateDir?: string;
+};
+/**
+ * Computes current file states for a component
+ */
+export declare function computeComponentFileStates(handoff: Handoff, componentId: string, version: string): Promise<{
+ files: Record<string, FileState>;
+ templateDirFiles?: Record<string, FileState>;
+}>;
+/**
+ * Checks if a component needs to be rebuilt based on file states
+ */
+export declare function hasComponentChanged(cached: ComponentCacheEntry | null | undefined, current: {
+ files: Record<string, FileState>;
+ templateDirFiles?: Record<string, FileState>;
+}): boolean;
+/**
+ * Checks if the component output files exist
+ */
+export declare function checkOutputExists(handoff: Handoff, componentId: string, version: string): Promise<boolean>;
+/**
+ * Creates an empty cache structure
+ */
+export declare function createEmptyCache(): BuildCache;
+/**
+ * Updates cache entry for a specific component version
+ */
+export declare function updateComponentCacheEntry(cache: BuildCache, componentId: string, version: string, fileStates: {
+ files: Record<string, FileState>;
+ templateDirFiles?: Record<string, FileState>;
+}): void;
+/**
+ * Removes components from cache that are no longer in runtime config
+ */
+export declare function pruneRemovedComponents(cache: BuildCache, currentComponentIds: string[]): void;
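
Taken together, these declarations suggest the intended flow: load the cache, compare global dependencies, rebuild only components whose tracked files changed or whose output is missing, then persist the updated cache. A sketch under those assumptions; the 'handoff' and 'handoff/cache' import paths and the buildComponent helper are hypothetical:

    import Handoff from 'handoff';
    import {
      checkOutputExists, computeComponentFileStates, computeGlobalDepsState,
      createEmptyCache, hasComponentChanged, haveGlobalDepsChanged,
      loadBuildCache, pruneRemovedComponents, saveBuildCache, updateComponentCacheEntry,
    } from 'handoff/cache';

    async function buildWithCache(handoff: Handoff, components: Array<{ id: string; version: string }>) {
      const cache = (await loadBuildCache(handoff)) ?? createEmptyCache();
      const globalDeps = await computeGlobalDepsState(handoff);
      const globalsChanged = haveGlobalDepsChanged(cache.globalDeps, globalDeps);

      for (const { id, version } of components) {
        const current = await computeComponentFileStates(handoff, id, version);
        const cached = cache.components[id]?.[version];
        const outputExists = await checkOutputExists(handoff, id, version);

        if (!globalsChanged && outputExists && !hasComponentChanged(cached, current)) {
          continue; // Nothing relevant changed; skip this component.
        }
        // await buildComponent(handoff, id, version); // hypothetical builder call
        updateComponentCacheEntry(cache, id, version, current);
      }

      pruneRemovedComponents(cache, components.map((c) => c.id));
      cache.globalDeps = globalDeps;
      await saveBuildCache(handoff, cache);
    }
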
diff --git a/dist/cache/build-cache.js b/dist/cache/build-cache.js
new file mode 100644
index 00000000..1772dc85
--- /dev/null
+++ b/dist/cache/build-cache.js
@@ -0,0 +1,290 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getCachePath = getCachePath;
+exports.loadBuildCache = loadBuildCache;
+exports.saveBuildCache = saveBuildCache;
+exports.computeGlobalDepsState = computeGlobalDepsState;
+exports.haveGlobalDepsChanged = haveGlobalDepsChanged;
+exports.getComponentFilePaths = getComponentFilePaths;
+exports.computeComponentFileStates = computeComponentFileStates;
+exports.hasComponentChanged = hasComponentChanged;
+exports.checkOutputExists = checkOutputExists;
+exports.createEmptyCache = createEmptyCache;
+exports.updateComponentCacheEntry = updateComponentCacheEntry;
+exports.pruneRemovedComponents = pruneRemovedComponents;
+const fs_extra_1 = __importDefault(require("fs-extra"));
+const path_1 = __importDefault(require("path"));
+const logger_1 = require("../utils/logger");
+const file_state_1 = require("./file-state");
+/** Current cache format version - bump when structure changes */
+const CACHE_VERSION = '1.0.0';
+/**
+ * Gets the path to the build cache file
+ */
+function getCachePath(handoff) {
+ return path_1.default.resolve(handoff.modulePath, '.handoff', handoff.getProjectId(), '.cache', 'build-cache.json');
+}
+/**
+ * Loads the build cache from disk
+ * @returns The cached data or null if cache doesn't exist or is invalid
+ */
+function loadBuildCache(handoff) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const cachePath = getCachePath(handoff);
+ try {
+ if (!(yield fs_extra_1.default.pathExists(cachePath))) {
+ logger_1.Logger.debug('No existing build cache found');
+ return null;
+ }
+ const data = yield fs_extra_1.default.readJson(cachePath);
+ // Validate cache version
+ if (data.version !== CACHE_VERSION) {
+ logger_1.Logger.debug(`Build cache version mismatch (${data.version} vs ${CACHE_VERSION}), invalidating`);
+ return null;
+ }
+ return data;
+ }
+ catch (error) {
+ logger_1.Logger.debug('Failed to load build cache, will rebuild all components:', error);
+ return null;
+ }
+ });
+}
+/**
+ * Saves the build cache to disk
+ * Uses atomic write (temp file + rename) to prevent corruption
+ */
+function saveBuildCache(handoff, cache) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const cachePath = getCachePath(handoff);
+ const cacheDir = path_1.default.dirname(cachePath);
+ const tempPath = `${cachePath}.tmp`;
+ try {
+ yield fs_extra_1.default.ensureDir(cacheDir);
+ yield fs_extra_1.default.writeJson(tempPath, cache, { spaces: 2 });
+ yield fs_extra_1.default.rename(tempPath, cachePath);
+ logger_1.Logger.debug('Build cache saved');
+ }
+ catch (error) {
+ logger_1.Logger.debug('Failed to save build cache:', error);
+ // Clean up temp file if it exists
+ try {
+ yield fs_extra_1.default.remove(tempPath);
+ }
+ catch (_a) {
+ // Ignore cleanup errors
+ }
+ }
+ });
+}
+/**
+ * Computes the current state of global dependencies
+ */
+function computeGlobalDepsState(handoff) {
+ return __awaiter(this, void 0, void 0, function* () {
+ var _a, _b, _c, _d, _e, _f, _g, _h;
+ const result = {};
+ // tokens.json
+ const tokensPath = handoff.getTokensFilePath();
+ result.tokens = (_a = (yield (0, file_state_1.computeFileState)(tokensPath))) !== null && _a !== void 0 ? _a : undefined;
+ // shared.scss or shared.css
+ const sharedScssPath = path_1.default.resolve(handoff.workingPath, 'integration/components/shared.scss');
+ const sharedCssPath = path_1.default.resolve(handoff.workingPath, 'integration/components/shared.css');
+ const sharedScssState = yield (0, file_state_1.computeFileState)(sharedScssPath);
+ const sharedCssState = yield (0, file_state_1.computeFileState)(sharedCssPath);
+ result.sharedStyles = (_b = sharedScssState !== null && sharedScssState !== void 0 ? sharedScssState : sharedCssState) !== null && _b !== void 0 ? _b : undefined;
+ // Global SCSS entry
+ if ((_d = (_c = handoff.runtimeConfig) === null || _c === void 0 ? void 0 : _c.entries) === null || _d === void 0 ? void 0 : _d.scss) {
+ result.globalScss = (_e = (yield (0, file_state_1.computeFileState)(handoff.runtimeConfig.entries.scss))) !== null && _e !== void 0 ? _e : undefined;
+ }
+ // Global JS entry
+ if ((_g = (_f = handoff.runtimeConfig) === null || _f === void 0 ? void 0 : _f.entries) === null || _g === void 0 ? void 0 : _g.js) {
+ result.globalJs = (_h = (yield (0, file_state_1.computeFileState)(handoff.runtimeConfig.entries.js))) !== null && _h !== void 0 ? _h : undefined;
+ }
+ return result;
+ });
+}
+/**
+ * Checks if global dependencies have changed
+ */
+function haveGlobalDepsChanged(cached, current) {
+ if (!cached)
+ return true;
+ // Check each global dependency
+ if (!(0, file_state_1.statesMatch)(cached.tokens, current.tokens)) {
+ logger_1.Logger.debug('Global dependency changed: tokens.json');
+ return true;
+ }
+ if (!(0, file_state_1.statesMatch)(cached.sharedStyles, current.sharedStyles)) {
+ logger_1.Logger.debug('Global dependency changed: shared styles');
+ return true;
+ }
+ if (!(0, file_state_1.statesMatch)(cached.globalScss, current.globalScss)) {
+ logger_1.Logger.debug('Global dependency changed: global SCSS entry');
+ return true;
+ }
+ if (!(0, file_state_1.statesMatch)(cached.globalJs, current.globalJs)) {
+ logger_1.Logger.debug('Global dependency changed: global JS entry');
+ return true;
+ }
+ return false;
+}
+/**
+ * Gets all file paths that should be tracked for a component
+ */
+function getComponentFilePaths(handoff, componentId, version) {
+ var _a, _b, _c, _d;
+ const runtimeComponent = (_d = (_c = (_b = (_a = handoff.runtimeConfig) === null || _a === void 0 ? void 0 : _a.entries) === null || _b === void 0 ? void 0 : _b.components) === null || _c === void 0 ? void 0 : _c[componentId]) === null || _d === void 0 ? void 0 : _d[version];
+ if (!runtimeComponent) {
+ return { files: [] };
+ }
+ const files = [];
+ let templateDir;
+ // Find the config file path for this component
+ const configPaths = handoff.getConfigFilePaths();
+ for (const configPath of configPaths) {
+ // Check if this config path belongs to this component/version
+ if (configPath.includes(componentId) && configPath.includes(version)) {
+ files.push(configPath);
+ break;
+ }
+ }
+ // Add entry files
+ const entries = runtimeComponent.entries;
+ if (entries) {
+ if (entries.js) {
+ files.push(entries.js);
+ }
+ if (entries.scss) {
+ files.push(entries.scss);
+ }
+ // Handle both 'template' (singular) and 'templates' (plural) entry types
+ const templatePath = entries.template || entries.templates;
+ if (templatePath) {
+ try {
+ const stat = fs_extra_1.default.statSync(templatePath);
+ if (stat.isDirectory()) {
+ templateDir = templatePath;
+ }
+ else {
+ files.push(templatePath);
+ }
+ }
+ catch (_e) {
+ // File doesn't exist, still add to track
+ files.push(templatePath);
+ }
+ }
+ }
+ return { files, templateDir };
+}
+/**
+ * Computes current file states for a component
+ */
+function computeComponentFileStates(handoff, componentId, version) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const { files: filePaths, templateDir } = getComponentFilePaths(handoff, componentId, version);
+ const files = {};
+ for (const filePath of filePaths) {
+ const state = yield (0, file_state_1.computeFileState)(filePath);
+ if (state) {
+ files[filePath] = state;
+ }
+ }
+ let templateDirFiles;
+ if (templateDir) {
+ templateDirFiles = yield (0, file_state_1.computeDirectoryState)(templateDir, ['.hbs', '.html']);
+ }
+ return { files, templateDirFiles };
+ });
+}
+/**
+ * Checks if a component needs to be rebuilt based on file states
+ */
+function hasComponentChanged(cached, current) {
+ if (!cached) {
+ return true; // No cache entry means new component
+ }
+ // Check regular files
+ const cachedFiles = Object.keys(cached.files);
+ const currentFiles = Object.keys(current.files);
+ // Check if file count changed
+ if (cachedFiles.length !== currentFiles.length) {
+ return true;
+ }
+ // Check if any files were added or removed
+ const cachedSet = new Set(cachedFiles);
+ for (const file of currentFiles) {
+ if (!cachedSet.has(file)) {
+ return true;
+ }
+ }
+ // Check if any file states changed
+ for (const file of cachedFiles) {
+ if (!(0, file_state_1.statesMatch)(cached.files[file], current.files[file])) {
+ return true;
+ }
+ }
+ // Check template directory files if applicable
+ if (!(0, file_state_1.directoryStatesMatch)(cached.templateDirFiles, current.templateDirFiles)) {
+ return true;
+ }
+ return false;
+}
+/**
+ * Checks if the component output files exist
+ */
+function checkOutputExists(handoff, componentId, version) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const outputPath = path_1.default.resolve(handoff.workingPath, 'public/api/component', componentId, `${version}.json`);
+ return fs_extra_1.default.pathExists(outputPath);
+ });
+}
+/**
+ * Creates an empty cache structure
+ */
+function createEmptyCache() {
+ return {
+ version: CACHE_VERSION,
+ globalDeps: {},
+ components: {},
+ };
+}
+/**
+ * Updates cache entry for a specific component version
+ */
+function updateComponentCacheEntry(cache, componentId, version, fileStates) {
+ if (!cache.components[componentId]) {
+ cache.components[componentId] = {};
+ }
+ cache.components[componentId][version] = {
+ files: fileStates.files,
+ templateDirFiles: fileStates.templateDirFiles,
+ buildTimestamp: Date.now(),
+ };
+}
+/**
+ * Removes components from cache that are no longer in runtime config
+ */
+function pruneRemovedComponents(cache, currentComponentIds) {
+ const currentSet = new Set(currentComponentIds);
+ const cachedIds = Object.keys(cache.components);
+ for (const cachedId of cachedIds) {
+ if (!currentSet.has(cachedId)) {
+ logger_1.Logger.debug(`Pruning removed component from cache: ${cachedId}`);
+ delete cache.components[cachedId];
+ }
+ }
+}
diff --git a/dist/cache/file-state.d.ts b/dist/cache/file-state.d.ts
new file mode 100644
index 00000000..54ddcbc1
--- /dev/null
+++ b/dist/cache/file-state.d.ts
@@ -0,0 +1,36 @@
+/**
+ * Represents the state of a file for change detection
+ */
+export interface FileState {
+ /** File modification time in milliseconds */
+ mtime: number;
+ /** File size in bytes */
+ size: number;
+}
+/**
+ * Computes the current state (mtime, size) of a file
+ * @param filePath - Absolute path to the file
+ * @returns FileState if file exists, null otherwise
+ */
+export declare function computeFileState(filePath: string): Promise<FileState | null>;
+/**
+ * Computes file states for all files in a directory (recursively)
+ * @param dirPath - Absolute path to the directory
+ * @param extensions - Optional array of file extensions to include (e.g., ['.hbs', '.html'])
+ * @returns Record mapping relative file paths to their states
+ */
+export declare function computeDirectoryState(dirPath: string, extensions?: string[]): Promise<Record<string, FileState>>;
+/**
+ * Compares two file states for equality
+ * @param a - First file state (can be null/undefined)
+ * @param b - Second file state (can be null/undefined)
+ * @returns true if states match, false otherwise
+ */
+export declare function statesMatch(a: FileState | null | undefined, b: FileState | null | undefined): boolean;
+/**
+ * Compares two records of file states
+ * @param cached - Previously cached file states
+ * @param current - Current file states
+ * @returns true if all states match, false if any differ or files added/removed
+ */
+export declare function directoryStatesMatch(cached: Record<string, FileState> | null | undefined, current: Record<string, FileState> | null | undefined): boolean;
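
Change detection here compares only modification time and size rather than hashing contents, which keeps watch loops cheap; a rewrite that leaves both values unchanged would go unnoticed. A small sketch of the comparison, assuming the same hypothetical 'handoff/cache' barrel as above and a cached state recorded at the previous build:

    import { computeFileState, statesMatch, type FileState } from 'handoff/cache';

    async function fileChanged(cached: FileState | null, filePath: string): Promise<boolean> {
      const current = await computeFileState(filePath);
      // True when mtime or size differs, or when the file appeared or disappeared.
      return !statesMatch(cached, current);
    }
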
diff --git a/dist/cache/file-state.js b/dist/cache/file-state.js
new file mode 100644
index 00000000..e8bf313d
--- /dev/null
+++ b/dist/cache/file-state.js
@@ -0,0 +1,132 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.computeFileState = computeFileState;
+exports.computeDirectoryState = computeDirectoryState;
+exports.statesMatch = statesMatch;
+exports.directoryStatesMatch = directoryStatesMatch;
+const fs_extra_1 = __importDefault(require("fs-extra"));
+const path_1 = __importDefault(require("path"));
+/**
+ * Computes the current state (mtime, size) of a file
+ * @param filePath - Absolute path to the file
+ * @returns FileState if file exists, null otherwise
+ */
+function computeFileState(filePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ const stats = yield fs_extra_1.default.stat(filePath);
+ if (!stats.isFile()) {
+ return null;
+ }
+ return {
+ mtime: stats.mtimeMs,
+ size: stats.size,
+ };
+ }
+ catch (_a) {
+ return null;
+ }
+ });
+}
+/**
+ * Computes file states for all files in a directory (recursively)
+ * @param dirPath - Absolute path to the directory
+ * @param extensions - Optional array of file extensions to include (e.g., ['.hbs', '.html'])
+ * @returns Record mapping relative file paths to their states
+ */
+function computeDirectoryState(dirPath, extensions) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const result = {};
+ try {
+ const stats = yield fs_extra_1.default.stat(dirPath);
+ if (!stats.isDirectory()) {
+ return result;
+ }
+ const entries = yield fs_extra_1.default.readdir(dirPath, { withFileTypes: true });
+ for (const entry of entries) {
+ const fullPath = path_1.default.join(dirPath, entry.name);
+ if (entry.isDirectory()) {
+ // Recursively process subdirectories
+ const subDirStates = yield computeDirectoryState(fullPath, extensions);
+ for (const [subPath, state] of Object.entries(subDirStates)) {
+ result[path_1.default.join(entry.name, subPath)] = state;
+ }
+ }
+ else if (entry.isFile()) {
+ // Check extension filter if provided
+ if (extensions && extensions.length > 0) {
+ const ext = path_1.default.extname(entry.name).toLowerCase();
+ if (!extensions.includes(ext)) {
+ continue;
+ }
+ }
+ const fileState = yield computeFileState(fullPath);
+ if (fileState) {
+ result[entry.name] = fileState;
+ }
+ }
+ }
+ }
+ catch (_a) {
+ // Directory doesn't exist or can't be read
+ }
+ return result;
+ });
+}
+/**
+ * Compares two file states for equality
+ * @param a - First file state (can be null/undefined)
+ * @param b - Second file state (can be null/undefined)
+ * @returns true if states match, false otherwise
+ */
+function statesMatch(a, b) {
+ if (!a && !b)
+ return true;
+ if (!a || !b)
+ return false;
+ return a.mtime === b.mtime && a.size === b.size;
+}
+/**
+ * Compares two records of file states
+ * @param cached - Previously cached file states
+ * @param current - Current file states
+ * @returns true if all states match, false if any differ or files added/removed
+ */
+function directoryStatesMatch(cached, current) {
+ if (!cached && !current)
+ return true;
+ if (!cached || !current)
+ return false;
+ const cachedKeys = Object.keys(cached);
+ const currentKeys = Object.keys(current);
+ // Check if file count differs
+ if (cachedKeys.length !== currentKeys.length) {
+ return false;
+ }
+ // Check if any files were added or removed
+ const cachedSet = new Set(cachedKeys);
+ for (const key of currentKeys) {
+ if (!cachedSet.has(key)) {
+ return false;
+ }
+ }
+ // Check if any file states changed
+ for (const key of cachedKeys) {
+ if (!statesMatch(cached[key], current[key])) {
+ return false;
+ }
+ }
+ return true;
+}
diff --git a/dist/cache/index.d.ts b/dist/cache/index.d.ts
new file mode 100644
index 00000000..d2984880
--- /dev/null
+++ b/dist/cache/index.d.ts
@@ -0,0 +1,2 @@
+export { computeDirectoryState, computeFileState, directoryStatesMatch, statesMatch, type FileState } from './file-state';
+export { checkOutputExists, computeComponentFileStates, computeGlobalDepsState, createEmptyCache, getCachePath, hasComponentChanged, haveGlobalDepsChanged, loadBuildCache, pruneRemovedComponents, saveBuildCache, updateComponentCacheEntry, type BuildCache, type ComponentCacheEntry, type GlobalDepsState, } from './build-cache';
diff --git a/dist/cache/index.js b/dist/cache/index.js
new file mode 100644
index 00000000..fccb7251
--- /dev/null
+++ b/dist/cache/index.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.updateComponentCacheEntry = exports.saveBuildCache = exports.pruneRemovedComponents = exports.loadBuildCache = exports.haveGlobalDepsChanged = exports.hasComponentChanged = exports.getCachePath = exports.createEmptyCache = exports.computeGlobalDepsState = exports.computeComponentFileStates = exports.checkOutputExists = exports.statesMatch = exports.directoryStatesMatch = exports.computeFileState = exports.computeDirectoryState = void 0;
+// File state utilities
+var file_state_1 = require("./file-state");
+Object.defineProperty(exports, "computeDirectoryState", { enumerable: true, get: function () { return file_state_1.computeDirectoryState; } });
+Object.defineProperty(exports, "computeFileState", { enumerable: true, get: function () { return file_state_1.computeFileState; } });
+Object.defineProperty(exports, "directoryStatesMatch", { enumerable: true, get: function () { return file_state_1.directoryStatesMatch; } });
+Object.defineProperty(exports, "statesMatch", { enumerable: true, get: function () { return file_state_1.statesMatch; } });
+// Build cache utilities
+var build_cache_1 = require("./build-cache");
+Object.defineProperty(exports, "checkOutputExists", { enumerable: true, get: function () { return build_cache_1.checkOutputExists; } });
+Object.defineProperty(exports, "computeComponentFileStates", { enumerable: true, get: function () { return build_cache_1.computeComponentFileStates; } });
+Object.defineProperty(exports, "computeGlobalDepsState", { enumerable: true, get: function () { return build_cache_1.computeGlobalDepsState; } });
+Object.defineProperty(exports, "createEmptyCache", { enumerable: true, get: function () { return build_cache_1.createEmptyCache; } });
+Object.defineProperty(exports, "getCachePath", { enumerable: true, get: function () { return build_cache_1.getCachePath; } });
+Object.defineProperty(exports, "hasComponentChanged", { enumerable: true, get: function () { return build_cache_1.hasComponentChanged; } });
+Object.defineProperty(exports, "haveGlobalDepsChanged", { enumerable: true, get: function () { return build_cache_1.haveGlobalDepsChanged; } });
+Object.defineProperty(exports, "loadBuildCache", { enumerable: true, get: function () { return build_cache_1.loadBuildCache; } });
+Object.defineProperty(exports, "pruneRemovedComponents", { enumerable: true, get: function () { return build_cache_1.pruneRemovedComponents; } });
+Object.defineProperty(exports, "saveBuildCache", { enumerable: true, get: function () { return build_cache_1.saveBuildCache; } });
+Object.defineProperty(exports, "updateComponentCacheEntry", { enumerable: true, get: function () { return build_cache_1.updateComponentCacheEntry; } });
diff --git a/dist/cli/eject.js b/dist/cli/eject.js
index 7e584894..62d7b338 100644
--- a/dist/cli/eject.js
+++ b/dist/cli/eject.js
@@ -13,9 +13,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ejectTheme = exports.ejectPages = exports.ejectConfig = void 0;
-const chalk_1 = __importDefault(require("chalk"));
const fs_extra_1 = __importDefault(require("fs-extra"));
const path_1 = __importDefault(require("path"));
+const logger_1 = require("../utils/logger");
/**
* Eject the config to the working directory
* @param handoff
@@ -24,13 +24,13 @@ const ejectConfig = (handoff) => __awaiter(void 0, void 0, void 0, function* ()
const configPath = path_1.default.resolve(path_1.default.join(handoff.workingPath, 'handoff.config.js'));
if (fs_extra_1.default.existsSync(configPath)) {
if (!handoff.force) {
- console.log(chalk_1.default.red(`A config already exists in the working directory. Use the --force flag to overwrite.`));
+ logger_1.Logger.error(`Config file already exists. Use "--force" to overwrite.`);
}
}
// load the template as a string
const template = fs_extra_1.default.readFileSync(path_1.default.resolve(handoff.modulePath, 'config/config.template.js'), 'utf8');
fs_extra_1.default.writeFileSync(configPath, template);
- console.log(chalk_1.default.green(`Config ejected to ${configPath}`));
+ logger_1.Logger.success(`Config ejected to ${configPath}`);
return handoff;
});
exports.ejectConfig = ejectConfig;
@@ -43,13 +43,13 @@ const ejectPages = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
const workingPath = path_1.default.resolve(path_1.default.join(handoff.workingPath, 'pages'));
if (fs_extra_1.default.existsSync(workingPath)) {
if (!handoff.force) {
- console.log(chalk_1.default.yellow(`It appears you already have custom pages. Use the --force flag to merge in any pages you haven't customized.`));
+ logger_1.Logger.warn(`It appears you already have custom pages. Use the --force flag to merge in any pages you haven't customized.`);
return;
}
}
const docsPath = path_1.default.resolve(path_1.default.join(handoff.modulePath, 'config/docs'));
fs_extra_1.default.copySync(docsPath, workingPath, { overwrite: false });
- console.log(chalk_1.default.green(`Customizable pages ejected to ${workingPath}`));
+ logger_1.Logger.success(`Customizable pages ejected to ${workingPath}`);
return handoff;
});
exports.ejectPages = ejectPages;
@@ -63,7 +63,7 @@ const ejectTheme = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
const workingPath = path_1.default.resolve(path_1.default.join(handoff.workingPath, 'theme', 'default.scss'));
if (fs_extra_1.default.existsSync(workingPath)) {
if (!handoff.force) {
- console.log(chalk_1.default.yellow(`It appears you already have custom theme. Use the --force flag to replace you haven't customized.`));
+ logger_1.Logger.warn(`It appears you already have a custom theme. Use the --force flag to replace it.`);
return;
}
}
@@ -71,11 +71,11 @@ const ejectTheme = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
const docsPath = path_1.default.resolve(path_1.default.join(handoff.modulePath, `src/app/sass/themes/_${currentTheme}.scss`));
if (fs_extra_1.default.existsSync(docsPath)) {
fs_extra_1.default.copySync(docsPath, workingPath, { overwrite: false });
- console.log(chalk_1.default.green(`Customizable theme ejected to ${workingPath}`));
+ logger_1.Logger.success(`Customizable theme ejected to ${workingPath}`);
}
else {
fs_extra_1.default.copySync(path_1.default.resolve(path_1.default.join(handoff.modulePath, `src/app/sass/themes/_default.scss`)), workingPath, { overwrite: false });
- console.log(chalk_1.default.green(`Customizable theme ejected to ${workingPath}`));
+ logger_1.Logger.success(`Customizable theme ejected to ${workingPath}`);
}
return handoff;
});
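
These hunks, like the ones in dist/app.js, replace direct chalk + console calls with a shared Logger from utils/logger. That module is not part of this changeset; from the call sites it appears to expose error, warn, success, log, and debug methods. A hypothetical sketch of that shape, with the debug gating as an assumption:

    import chalk from 'chalk';

    export const Logger = {
      log: (...args: unknown[]) => console.log(...args),
      debug: (...args: unknown[]) => {
        if (process.env.HANDOFF_DEBUG) console.debug(...args); // gating flag is assumed
      },
      success: (message: string, ...args: unknown[]) => console.log(chalk.green(message), ...args),
      warn: (message: string, ...args: unknown[]) => console.warn(chalk.yellow(message), ...args),
      error: (...args: unknown[]) => console.error(chalk.red(String(args[0])), ...args.slice(1)),
    };
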
diff --git a/dist/cli/make.js b/dist/cli/make.js
index 94441c55..5b24524b 100644
--- a/dist/cli/make.js
+++ b/dist/cli/make.js
@@ -16,6 +16,7 @@ exports.makeComponent = exports.makePage = exports.makeTemplate = void 0;
const chalk_1 = __importDefault(require("chalk"));
const fs_extra_1 = __importDefault(require("fs-extra"));
const path_1 = __importDefault(require("path"));
+const logger_1 = require("../utils/logger");
const prompt_1 = require("../utils/prompt");
/**
* Make a new exportable component
@@ -24,22 +25,22 @@ const prompt_1 = require("../utils/prompt");
const makeTemplate = (handoff, component, state) => __awaiter(void 0, void 0, void 0, function* () {
var _a, _b;
if (!((_b = (_a = handoff === null || handoff === void 0 ? void 0 : handoff.runtimeConfig) === null || _a === void 0 ? void 0 : _a.entries) === null || _b === void 0 ? void 0 : _b.templates)) {
- console.log(chalk_1.default.red(`Runtime config does not specify entry for templates.`));
+ logger_1.Logger.error(`Runtime config does not specify entry for templates.`);
return;
}
if (!component) {
- console.log(chalk_1.default.red(`Template component must be set`));
+ logger_1.Logger.error(`Template component must be set`);
return;
}
if (!state) {
state = 'default';
}
if (!/^[a-z0-9]+$/i.test(component)) {
- console.log(chalk_1.default.red(`Template component must be alphanumeric and may contain dashes or underscores`));
+ logger_1.Logger.error(`Template component must be alphanumeric and may contain dashes or underscores`);
return;
}
if (!/^[a-z0-9]+$/i.test(state)) {
- console.log(chalk_1.default.red(`Template state must be alphanumeric and may contain dashes or underscores`));
+ logger_1.Logger.error(`Template state must be alphanumeric and may contain dashes or underscores`);
return;
}
const workingPath = path_1.default.resolve(handoff.runtimeConfig.entries.templates, component);
@@ -49,14 +50,14 @@ const makeTemplate = (handoff, component, state) => __awaiter(void 0, void 0, vo
const target = path_1.default.resolve(workingPath, `${state}.html`);
if (fs_extra_1.default.existsSync(target)) {
if (!handoff.force) {
- console.log(chalk_1.default.yellow(`'${state}' already exists as custom template. Use the --force flag revert it to default.`));
+                logger_1.Logger.warn(`'${state}' already exists as a custom template. Use the --force flag to revert it to default.`);
return;
}
}
const templatePath = path_1.default.resolve(path_1.default.join(handoff.modulePath, 'config/templates', 'template.html'));
const template = fs_extra_1.default.readFileSync(templatePath, 'utf8');
fs_extra_1.default.writeFileSync(target, template);
- console.log(chalk_1.default.green(`New template ${state}.html was created in ${workingPath}`));
+ logger_1.Logger.success(`New template ${state}.html was created in ${workingPath}`);
return handoff;
});
exports.makeTemplate = makeTemplate;
@@ -67,17 +68,17 @@ exports.makeTemplate = makeTemplate;
const makePage = (handoff, name, parent) => __awaiter(void 0, void 0, void 0, function* () {
let type = 'md';
if (!name) {
- console.log(chalk_1.default.red(`Page name must be set`));
+ logger_1.Logger.error(`Page name must be set`);
return;
}
if (!/^[a-z0-9]+$/i.test(name)) {
- console.log(chalk_1.default.red(`Page name must be alphanumeric and may contain dashes or underscores`));
+ logger_1.Logger.error(`Page name must be alphanumeric and may contain dashes or underscores`);
return;
}
let workingPath, sourcePath, templatePath;
if (parent) {
if (!/^[a-z0-9]+$/i.test(parent)) {
- console.log(chalk_1.default.red(`Parent name must be alphanumeric and may contain dashes or underscores`));
+ logger_1.Logger.error(`Parent name must be alphanumeric and may contain dashes or underscores`);
return;
}
workingPath = path_1.default.resolve(path_1.default.join(handoff.workingPath, `pages`, parent));
@@ -93,7 +94,7 @@ const makePage = (handoff, name, parent) => __awaiter(void 0, void 0, void 0, fu
const target = path_1.default.resolve(workingPath, `${name}.${type}`);
if (fs_extra_1.default.existsSync(target)) {
if (!handoff.force) {
- console.log(chalk_1.default.yellow(`'${name}' already exists as custom page. Use the --force flag revert it to default.`));
+                logger_1.Logger.warn(`'${name}' already exists as a custom page. Use the --force flag to revert it to default.`);
return;
}
}
@@ -103,7 +104,7 @@ const makePage = (handoff, name, parent) => __awaiter(void 0, void 0, void 0, fu
}
const template = fs_extra_1.default.readFileSync(templatePath, 'utf8');
fs_extra_1.default.writeFileSync(target, template);
- console.log(chalk_1.default.green(`New template ${name}.${type} was created in ${workingPath}`));
+        logger_1.Logger.success(`New page ${name}.${type} was created in ${workingPath}`);
return handoff;
});
exports.makePage = makePage;
@@ -113,7 +114,7 @@ exports.makePage = makePage;
*/
const makeComponent = (handoff, name) => __awaiter(void 0, void 0, void 0, function* () {
if (!name) {
- console.log(chalk_1.default.red(`Component name must be set`));
+ logger_1.Logger.error(`Component name must be set`);
return;
}
const version = '1.0.0';
@@ -125,7 +126,7 @@ const makeComponent = (handoff, name) => __awaiter(void 0, void 0, void 0, funct
const targetHtml = path_1.default.resolve(workingPath, `${name}.hbs`);
if (fs_extra_1.default.existsSync(targetHtml)) {
if (!handoff.force) {
- console.log(chalk_1.default.yellow(`'${name}' already exists as custom component.`));
+                logger_1.Logger.warn(`'${name}' already exists as a custom component. Use the --force flag to overwrite it.`);
return;
}
}
@@ -133,20 +134,20 @@ const makeComponent = (handoff, name) => __awaiter(void 0, void 0, void 0, funct
const htmlPath = path_1.default.resolve(templatePath, 'template.hbs');
const htmlTemplate = fs_extra_1.default.readFileSync(htmlPath, 'utf8');
fs_extra_1.default.writeFileSync(targetHtml, htmlTemplate);
- console.log(chalk_1.default.green(`New component ${name}.hbs was created in ${workingPath}`));
+ logger_1.Logger.success(`New component ${name}.hbs was created in ${workingPath}`);
const jsonpath = path_1.default.resolve(templatePath, 'template.json');
const jsonTemplate = fs_extra_1.default.readFileSync(jsonpath, 'utf8');
fs_extra_1.default.writeFileSync(path_1.default.resolve(workingPath, `${name}.json`), jsonTemplate);
const writeJSFile = yield (0, prompt_1.prompt)(chalk_1.default.green(`Would you like us to generate a supporting javascript file ${name}.js? (y/n): `));
if (writeJSFile === 'y') {
- console.log(chalk_1.default.green(`Writing ${name}.js.\n`));
+ logger_1.Logger.success(`Writing ${name}.js.\n`);
const jsPath = path_1.default.resolve(templatePath, 'template.js');
const jsTemplate = fs_extra_1.default.readFileSync(jsPath, 'utf8');
fs_extra_1.default.writeFileSync(path_1.default.resolve(workingPath, `${name}.js`), jsTemplate);
}
const writeSassFile = yield (0, prompt_1.prompt)(chalk_1.default.green(`Would you like us to generate a supporting SASS file ${name}.scss? (y/n): `));
if (writeSassFile === 'y') {
- console.log(chalk_1.default.green(`Writing ${name}.scss.\n`));
+ logger_1.Logger.success(`Writing ${name}.scss.\n`);
const scssPath = path_1.default.resolve(templatePath, 'template.scss');
const scssTemplate = fs_extra_1.default.readFileSync(scssPath, 'utf8');
fs_extra_1.default.writeFileSync(path_1.default.resolve(workingPath, `${name}.scss`), scssTemplate);
diff --git a/dist/commands/make/component.js b/dist/commands/make/component.js
index 424e4d28..9e9fdb68 100644
--- a/dist/commands/make/component.js
+++ b/dist/commands/make/component.js
@@ -13,6 +13,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const __1 = __importDefault(require("../.."));
+const logger_1 = require("../../utils/logger");
const utils_1 = require("../utils");
const command = {
    command: 'make:component <name>',
@@ -26,9 +27,8 @@ const command = {
handler: (args) => __awaiter(void 0, void 0, void 0, function* () {
const handoff = new __1.default(args.debug, args.force);
const componentName = args.name;
- const version = args.version;
if (!/^[a-z0-9_-]+$/i.test(componentName)) {
- console.error(`Component name must be alphanumeric and may contain dashes or underscores`);
+ logger_1.Logger.error(`Component name must be alphanumeric and may contain dashes or underscores`);
return;
}
yield handoff.makeComponent(componentName);
diff --git a/dist/commands/make/page.js b/dist/commands/make/page.js
index 48360194..9af2cd82 100644
--- a/dist/commands/make/page.js
+++ b/dist/commands/make/page.js
@@ -13,6 +13,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const __1 = __importDefault(require("../../"));
+const logger_1 = require("../../utils/logger");
const utils_1 = require("../utils");
const command = {
    command: 'make:page <name> [parent]',
@@ -32,12 +33,12 @@ const command = {
const handoff = new __1.default(args.debug, args.force);
const pageName = args.name;
if (!/^[a-z0-9]+$/i.test(pageName)) {
- console.error(`Page name must be alphanumeric and may contain dashes or underscores`);
+ logger_1.Logger.error(`Page name must be alphanumeric and may contain dashes or underscores`);
return;
}
let pageParent = args.parent;
if (pageParent && !/^[a-z0-9]+$/i.test(pageParent)) {
- console.error(`Page parent must be alphanumeric and may contain dashes or underscores`);
+ logger_1.Logger.error(`Page parent must be alphanumeric and may contain dashes or underscores`);
return;
}
yield handoff.makePage(pageName, pageParent);
diff --git a/dist/commands/make/template.js b/dist/commands/make/template.js
index 883cad52..c776f778 100644
--- a/dist/commands/make/template.js
+++ b/dist/commands/make/template.js
@@ -13,6 +13,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const __1 = __importDefault(require("../../"));
+const logger_1 = require("../../utils/logger");
const utils_1 = require("../utils");
const command = {
    command: 'make:template <component> [state]',
@@ -32,12 +33,12 @@ const command = {
const handoff = new __1.default(args.debug, args.force);
const templateComponent = args.component;
if (!/^[a-z0-9]+$/i.test(templateComponent)) {
- console.error(`Template component must be alphanumeric and may contain dashes or underscores`);
+ logger_1.Logger.error(`Template component must be alphanumeric and may contain dashes or underscores`);
return;
}
let templateState = args.state;
if (templateState && !/^[a-z0-9]+$/i.test(templateState)) {
- console.error(`Template state must be alphanumeric and may contain dashes or underscores`);
+ logger_1.Logger.error(`Template state must be alphanumeric and may contain dashes or underscores`);
return;
}
yield handoff.makeTemplate(templateComponent, templateState);
diff --git a/dist/commands/utils.d.ts b/dist/commands/utils.d.ts
index 96cb59c5..39d54aa4 100644
--- a/dist/commands/utils.d.ts
+++ b/dist/commands/utils.d.ts
@@ -1,5 +1,5 @@
import { Argv } from 'yargs';
-export declare const getSharedOptions: (yargs: Argv) => Argv & import("yargs").InferredOptionTypes<{
+export declare const getSharedOptions: (yargs: Argv) => Argv & import("yargs").InferredOptionTypes<{
config: {
alias: string;
type: "string";
diff --git a/dist/index.js b/dist/index.js
index d8450b3c..d237ae5c 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -36,7 +36,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.CoreTypes = exports.CoreTransformerUtils = exports.CoreTransformers = exports.initRuntimeConfig = void 0;
-const chalk_1 = __importDefault(require("chalk"));
require("dotenv/config");
const fs_extra_1 = __importDefault(require("fs-extra"));
const handoff_core_1 = require("handoff-core");
@@ -49,6 +48,7 @@ const config_1 = require("./config");
const pipeline_1 = __importStar(require("./pipeline"));
const component_1 = require("./transformers/preview/component");
const builder_1 = __importStar(require("./transformers/preview/component/builder"));
+const logger_1 = require("./utils/logger");
const path_2 = require("./utils/path");
class Handoff {
constructor(debug, force, config) {
@@ -67,6 +67,7 @@ class Handoff {
this.config = null;
this.debug = debug !== null && debug !== void 0 ? debug : false;
this.force = force !== null && force !== void 0 ? force : false;
+ logger_1.Logger.init({ debug: this.debug });
this.init(config);
global.handoff = this;
}
@@ -177,13 +178,9 @@ class Handoff {
}
validateComponents(skipBuild) {
return __awaiter(this, void 0, void 0, function* () {
- let segmentToProcess = builder_1.ComponentSegment.Validation;
- if (skipBuild) {
- segmentToProcess = builder_1.ComponentSegment.ValidationOnly;
- }
this.preRunner();
if (!skipBuild) {
- yield (0, builder_1.default)(this, undefined, segmentToProcess);
+ yield (0, builder_1.default)(this, undefined, builder_1.ComponentSegment.Validation);
}
return this;
});
@@ -233,16 +230,16 @@ class Handoff {
},
}, {
log: (msg) => {
- console.log(msg);
+ logger_1.Logger.log(msg);
},
err: (msg) => {
- console.log(chalk_1.default.red(msg));
+ logger_1.Logger.error(msg);
},
warn: (msg) => {
- console.log(chalk_1.default.yellow(msg));
+ logger_1.Logger.warn(msg);
},
success: (msg) => {
- console.log(chalk_1.default.green(msg));
+ logger_1.Logger.success(msg);
},
});
return this._handoffRunner;
@@ -371,8 +368,8 @@ const initConfig = (configOverride) => {
return returnConfig;
};
const initRuntimeConfig = (handoff) => {
- var _a, _b, _c, _d, _e, _f, _g, _h, _j;
- var _k;
+ var _a, _b, _c, _d, _e, _f, _g, _h;
+ var _j;
const configFiles = [];
const result = {
options: {},
@@ -389,17 +386,14 @@ const initRuntimeConfig = (handoff) => {
if (!!((_c = handoff.config.entries) === null || _c === void 0 ? void 0 : _c.js)) {
result.entries.js = path_1.default.resolve(handoff.workingPath, (_d = handoff.config.entries) === null || _d === void 0 ? void 0 : _d.js);
}
- else {
- console.log(chalk_1.default.red('No js entry found in config'), handoff.debug ? `Path: ${path_1.default.resolve(handoff.workingPath, (_e = handoff.config.entries) === null || _e === void 0 ? void 0 : _e.js)}` : '');
- }
- if ((_g = (_f = handoff.config.entries) === null || _f === void 0 ? void 0 : _f.components) === null || _g === void 0 ? void 0 : _g.length) {
+ if ((_f = (_e = handoff.config.entries) === null || _e === void 0 ? void 0 : _e.components) === null || _f === void 0 ? void 0 : _f.length) {
const componentPaths = handoff.config.entries.components.flatMap(getComponentsForPath);
for (const componentPath of componentPaths) {
const resolvedComponentPath = path_1.default.resolve(handoff.workingPath, componentPath);
const componentBaseName = path_1.default.basename(resolvedComponentPath);
const versions = getVersionsForComponent(resolvedComponentPath);
if (!versions.length) {
- console.warn(`No versions found for component at: ${resolvedComponentPath}`);
+ logger_1.Logger.warn(`No versions found for component at: ${resolvedComponentPath}`);
continue;
}
const latest = getLatestVersionForComponent(versions);
@@ -408,7 +402,7 @@ const initRuntimeConfig = (handoff) => {
const possibleConfigFiles = [`${componentBaseName}.json`, `${componentBaseName}.js`, `${componentBaseName}.cjs`];
const configFileName = possibleConfigFiles.find((file) => fs_extra_1.default.existsSync(path_1.default.resolve(resolvedComponentVersionPath, file)));
if (!configFileName) {
- console.warn(`Missing config: ${path_1.default.resolve(resolvedComponentVersionPath, possibleConfigFiles.join(' or '))}`);
+ logger_1.Logger.warn(`Missing config: ${path_1.default.resolve(resolvedComponentVersionPath, possibleConfigFiles.join(' or '))}`);
continue;
}
const resolvedComponentVersionConfigPath = path_1.default.resolve(resolvedComponentVersionPath, configFileName);
@@ -427,7 +421,7 @@ const initRuntimeConfig = (handoff) => {
}
}
catch (err) {
- console.error(`Failed to read or parse config: ${resolvedComponentVersionConfigPath}`, err);
+ logger_1.Logger.error(`Failed to read or parse config: ${resolvedComponentVersionConfigPath}`, err);
continue;
}
// Use component basename as the id
@@ -444,10 +438,10 @@ const initRuntimeConfig = (handoff) => {
component.options || (component.options = {
transformer: { defaults: {}, replace: {} },
});
- (_k = component.options).transformer || (_k.transformer = { defaults: {}, replace: {} });
+ (_j = component.options).transformer || (_j.transformer = { defaults: {}, replace: {} });
const transformer = component.options.transformer;
- (_h = transformer.cssRootClass) !== null && _h !== void 0 ? _h : (transformer.cssRootClass = null);
- (_j = transformer.tokenNameSegments) !== null && _j !== void 0 ? _j : (transformer.tokenNameSegments = null);
+ (_g = transformer.cssRootClass) !== null && _g !== void 0 ? _g : (transformer.cssRootClass = null);
+ (_h = transformer.tokenNameSegments) !== null && _h !== void 0 ? _h : (transformer.tokenNameSegments = null);
// Normalize keys and values to lowercase
transformer.defaults = toLowerCaseKeysAndValues(Object.assign({}, transformer.defaults));
transformer.replace = toLowerCaseKeysAndValues(Object.assign({}, transformer.replace));
@@ -494,12 +488,12 @@ const validateConfig = (config) => {
// TODO: Check to see if the exported folder exists before we run start
if (!config.figma_project_id && !process.env.HANDOFF_FIGMA_PROJECT_ID) {
// check to see if we can get this from the env
- console.error(chalk_1.default.red('Figma project id not found in config or env. Please run `handoff-app fetch` first.'));
+ logger_1.Logger.error('Figma Project ID missing. Please set HANDOFF_FIGMA_PROJECT_ID or run "handoff-app fetch".');
throw new Error('Cannot initialize configuration');
}
if (!config.dev_access_token && !process.env.HANDOFF_DEV_ACCESS_TOKEN) {
// check to see if we can get this from the env
- console.error(chalk_1.default.red('Dev access token not found in config or env. Please run `handoff-app fetch` first.'));
+ logger_1.Logger.error('Figma Access Token missing. Please set HANDOFF_DEV_ACCESS_TOKEN or run "handoff-app fetch".');
throw new Error('Cannot initialize configuration');
}
return config;
@@ -516,7 +510,7 @@ const getVersionsForComponent = (componentPath) => {
versions.push(versionDirectory);
}
else {
- console.error(`Invalid version directory ${versionDirectory}`);
+ logger_1.Logger.error(`Invalid version directory ${versionDirectory}`);
}
}
}
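The Logger calls introduced throughout this diff (Logger.init, Logger.log, Logger.info, Logger.debug, Logger.warn, Logger.error, Logger.success) all point at a new dist/utils/logger module that is not itself shown here. A minimal sketch of the surface these call sites assume, reusing the same chalk colors the replaced console calls used, could look like the following (illustrative only, not the actual implementation):

    const chalk = require('chalk');

    let debugEnabled = false;

    const Logger = {
        // Called once from the Handoff constructor to pick up the debug flag.
        init: ({ debug } = {}) => { debugEnabled = !!debug; },
        log: (...args) => console.log(...args),
        info: (...args) => console.log(...args),
        // Only emitted when debug mode is enabled.
        debug: (...args) => { if (debugEnabled) console.log(chalk.gray('[debug]'), ...args); },
        warn: (...args) => console.log(chalk.yellow(...args)),
        error: (...args) => console.error(chalk.red(...args)),
        success: (...args) => console.log(chalk.green(...args)),
    };

    exports.Logger = Logger;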
diff --git a/dist/pipeline.js b/dist/pipeline.js
index e2bd5023..b9b445d3 100644
--- a/dist/pipeline.js
+++ b/dist/pipeline.js
@@ -48,6 +48,7 @@ const app_1 = __importDefault(require("./app"));
const changelog_1 = __importDefault(require("./changelog"));
const documentation_object_1 = require("./documentation-object");
const component_1 = require("./transformers/preview/component");
+const logger_1 = require("./utils/logger");
const prompt_1 = require("./utils/prompt");
/**
* Read Previous Json File
@@ -247,8 +248,8 @@ const validateHandoffRequirements = (handoff) => __awaiter(void 0, void 0, void
// couldn't find the right version, but ...
}
if (!requirements) {
- console.log(chalk_1.default.redBright('Handoff Installation failed'));
- console.log(chalk_1.default.yellow('- Please update node to at least Node 16 https://nodejs.org/en/download. \n- You can read more about installing handoff at https://www.handoff.com/docs/'));
+ logger_1.Logger.error('Handoff installation failed.');
+        logger_1.Logger.warn('- Please update Node.js to version 16 or later: https://nodejs.org/en/download.\n- You can read more about installing Handoff at https://www.handoff.com/docs/');
throw new Error('Could not run handoff');
}
});
@@ -265,22 +266,22 @@ const validateFigmaAuth = (handoff) => __awaiter(void 0, void 0, void 0, functio
let missingEnvVars = false;
if (!DEV_ACCESS_TOKEN) {
missingEnvVars = true;
- console.log(chalk_1.default.yellow(`Figma developer access token not found. You can supply it as an environment variable or .env file at HANDOFF_DEV_ACCESS_TOKEN.
-Use these instructions to generate them ${chalk_1.default.blue(`https://help.figma.com/hc/en-us/articles/8085703771159-Manage-personal-access-tokens`)}\n`));
+        logger_1.Logger.warn(`Figma developer access token not found. You can supply it as an environment variable or in a .env file as HANDOFF_DEV_ACCESS_TOKEN.
+Use these instructions to generate one: https://help.figma.com/hc/en-us/articles/8085703771159-Manage-personal-access-tokens\n`);
DEV_ACCESS_TOKEN = yield (0, prompt_1.maskPrompt)(chalk_1.default.green('Figma Developer Key: '));
}
if (!FIGMA_PROJECT_ID) {
missingEnvVars = true;
- console.log(chalk_1.default.yellow(`\n\nFigma project id not found. You can supply it as an environment variable or .env file at HANDOFF_FIGMA_PROJECT_ID.
-You can find this by looking at the url of your Figma file. If the url is ${chalk_1.default.blue(`https://www.figma.com/file/IGYfyraLDa0BpVXkxHY2tE/Starter-%5BV2%5D`)}
-your id would be IGYfyraLDa0BpVXkxHY2tE\n`));
+        logger_1.Logger.warn(`\n\nFigma project ID not found. You can supply it as an environment variable or in a .env file as HANDOFF_FIGMA_PROJECT_ID.
+You can find it in the URL of your Figma file. If the URL is https://www.figma.com/file/IGYfyraLDa0BpVXkxHY2tE/Starter-%5BV2%5D
+your ID would be IGYfyraLDa0BpVXkxHY2tE\n`);
FIGMA_PROJECT_ID = yield (0, prompt_1.maskPrompt)(chalk_1.default.green('Figma Project Id: '));
}
if (missingEnvVars) {
- console.log(chalk_1.default.yellow(`\n\nYou supplied at least one required variable. We can write these variables to a local env file for you to make it easier to run the pipeline in the future.\n`));
+ logger_1.Logger.warn(`\n\nYou supplied at least one required variable. We can write these variables to a local env file for you to make it easier to run the pipeline in the future.\n`);
const writeEnvFile = yield (0, prompt_1.prompt)(chalk_1.default.green('Write environment variables to .env file? (y/n): '));
if (writeEnvFile !== 'y') {
- console.log(chalk_1.default.green(`Skipping .env file creation. You will need to supply these variables in the future.\n`));
+ logger_1.Logger.success(`Skipped .env file creation. Please provide these variables manually.`);
}
else {
const envFilePath = path_1.default.resolve(handoff.workingPath, '.env');
@@ -295,15 +296,15 @@ HANDOFF_FIGMA_PROJECT_ID="${FIGMA_PROJECT_ID}"
.catch(() => false);
if (fileExists) {
yield fs_extra_1.default.appendFile(envFilePath, envFileContent);
- console.log(chalk_1.default.green(`\nThe .env file was found and updated with new content. Since these are sensitive variables, please do not commit this file.\n`));
+ logger_1.Logger.success(`\nThe .env file was found and updated with new content. Since these are sensitive variables, please do not commit this file.\n`);
}
else {
yield fs_extra_1.default.writeFile(envFilePath, envFileContent.replace(/^\s*[\r\n]/gm, ''));
- console.log(chalk_1.default.green(`\nAn .env file was created in the root of your project. Since these are sensitive variables, please do not commit this file.\n`));
+ logger_1.Logger.success(`\nAn .env file was created in the root of your project. Since these are sensitive variables, please do not commit this file.\n`);
}
}
catch (error) {
- console.error(chalk_1.default.red('Error handling the .env file:', error));
+ logger_1.Logger.error('Error handling the .env file:', error);
}
}
}
@@ -311,7 +312,7 @@ HANDOFF_FIGMA_PROJECT_ID="${FIGMA_PROJECT_ID}"
handoff.config.figma_project_id = FIGMA_PROJECT_ID;
});
const figmaExtract = (handoff) => __awaiter(void 0, void 0, void 0, function* () {
- console.log(chalk_1.default.green(`Starting Figma data extraction.`));
+ logger_1.Logger.success(`Starting Figma data extraction.`);
let prevDocumentationObject = yield handoff.getDocumentationObject();
let changelog = (yield (0, exports.readPrevJSONFile)(handoff.getChangelogFilePath())) || [];
yield fs_extra_1.default.emptyDir(handoff.getOutputPath());
@@ -348,7 +349,7 @@ const pipeline = (handoff, build) => __awaiter(void 0, void 0, void 0, function*
if (!handoff.config) {
throw new Error('Handoff config not found');
}
- console.log(chalk_1.default.green(`Starting Handoff Figma data pipeline. Checking for environment and config.\n`));
+ logger_1.Logger.success(`Starting Handoff Figma data pipeline. Checking for environment and config.`);
yield validateHandoffRequirements(handoff);
yield validateFigmaAuth(handoff);
const documentationObject = yield figmaExtract(handoff);
diff --git a/dist/transformers/docgen/index.js b/dist/transformers/docgen/index.js
index 7a160746..0e620871 100644
--- a/dist/transformers/docgen/index.js
+++ b/dist/transformers/docgen/index.js
@@ -16,6 +16,7 @@ exports.generatePropertiesFromDocgen = void 0;
const fs_extra_1 = __importDefault(require("fs-extra"));
const path_1 = __importDefault(require("path"));
const react_docgen_typescript_1 = require("react-docgen-typescript");
+const logger_1 = require("../../utils/logger");
const schema_1 = require("../utils/schema");
/**
* Generates component properties using react-docgen-typescript
@@ -29,7 +30,7 @@ const generatePropertiesFromDocgen = (entry, handoff) => __awaiter(void 0, void
const tsconfigPath = path_1.default.resolve(handoff.workingPath, 'tsconfig.json');
// Check if tsconfig exists
if (!fs_extra_1.default.existsSync(tsconfigPath)) {
- console.warn(`TypeScript config not found at ${tsconfigPath}, using default configuration`);
+ logger_1.Logger.warn(`TypeScript config not found at ${tsconfigPath}, using default configuration`);
}
const parserConfig = {
savePropValueAsString: true,
@@ -53,7 +54,7 @@ const generatePropertiesFromDocgen = (entry, handoff) => __awaiter(void 0, void
return null;
}
catch (error) {
- console.warn(`Failed to generate docs with react-docgen-typescript for ${entry}:`, error);
+ logger_1.Logger.warn(`Failed to generate docs with react-docgen-typescript for ${entry}: ${error}`);
return null;
}
});
diff --git a/dist/transformers/plugins/handlebars-previews.js b/dist/transformers/plugins/handlebars-previews.js
index 51f210db..7afa4fa3 100644
--- a/dist/transformers/plugins/handlebars-previews.js
+++ b/dist/transformers/plugins/handlebars-previews.js
@@ -16,6 +16,8 @@ exports.handlebarsPreviewsPlugin = handlebarsPreviewsPlugin;
const fs_extra_1 = __importDefault(require("fs-extra"));
const handlebars_1 = __importDefault(require("handlebars"));
const path_1 = __importDefault(require("path"));
+const logger_1 = require("../../utils/logger");
+const vite_logger_1 = require("../utils/vite-logger");
const handlebars_2 = require("../utils/handlebars");
const html_1 = require("../utils/html");
const string_1 = require("../utils/string");
@@ -103,6 +105,9 @@ function handlebarsPreviewsPlugin(componentData, documentationComponents, handof
return {
name: PLUGIN_CONSTANTS.PLUGIN_NAME,
apply: 'build',
+ config: () => ({
+ customLogger: (0, vite_logger_1.createViteLogger)(),
+ }),
resolveId(resolveId) {
if (resolveId === PLUGIN_CONSTANTS.SCRIPT_ID) {
return resolveId;
@@ -127,14 +132,20 @@ function handlebarsPreviewsPlugin(componentData, documentationComponents, handof
const generatedPreviews = {};
// Generate previews for each variation
for (const previewKey in componentData.previews) {
- const previewData = componentData.previews[previewKey];
- // Render both normal and inspect modes
- const normalModeHtml = yield renderHandlebarsTemplate(templateContent, componentData, previewData, false);
- const inspectModeHtml = yield renderHandlebarsTemplate(templateContent, componentData, previewData, true);
- // Emit preview files
- emitPreviewFiles(componentId, previewKey, normalModeHtml, inspectModeHtml, (file) => this.emitFile(file));
- generatedPreviews[previewKey] = normalModeHtml;
- componentData.previews[previewKey].url = `${componentId}-${previewKey}.html`;
+ try {
+ const previewData = componentData.previews[previewKey];
+ // Render both normal and inspect modes
+ const normalModeHtml = yield renderHandlebarsTemplate(templateContent, componentData, previewData, false);
+ const inspectModeHtml = yield renderHandlebarsTemplate(templateContent, componentData, previewData, true);
+ // Emit preview files
+ emitPreviewFiles(componentId, previewKey, normalModeHtml, inspectModeHtml, (file) => this.emitFile(file));
+ generatedPreviews[previewKey] = normalModeHtml;
+ componentData.previews[previewKey].url = `${componentId}-${previewKey}.html`;
+ logger_1.Logger.debug(`Generated Handlebars preview: ${componentId}-${previewKey}`);
+ }
+ catch (err) {
+ logger_1.Logger.error(`Failed to generate Handlebars preview for ${componentId}-${previewKey}`, err);
+ }
}
// Update component data with results
componentData.format = PLUGIN_CONSTANTS.OUTPUT_FORMAT;
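This plugin (and the SSR render plugin below) now returns a customLogger from its Vite config hook via createViteLogger. The transformers/utils/vite-logger module is not included in this diff; a plausible sketch, assuming it only routes Vite's own output through the Logger facade, is:

    const { createLogger } = require('vite');
    const { Logger } = require('../../utils/logger');

    // Wrap Vite's default logger and forward its output to Handoff's Logger,
    // demoting routine build chatter to debug so it only appears with --debug.
    const createViteLogger = () => {
        const base = createLogger('info', { allowClearScreen: false });
        base.info = (msg) => Logger.debug(msg);
        base.warn = (msg) => Logger.warn(msg);
        base.error = (msg) => Logger.error(msg);
        return base;
    };

    exports.createViteLogger = createViteLogger;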
diff --git a/dist/transformers/plugins/ssr-render.js b/dist/transformers/plugins/ssr-render.js
index aef34b7e..34ba86d9 100644
--- a/dist/transformers/plugins/ssr-render.js
+++ b/dist/transformers/plugins/ssr-render.js
@@ -19,12 +19,14 @@ const path_1 = __importDefault(require("path"));
const react_1 = __importDefault(require("react"));
const server_1 = __importDefault(require("react-dom/server"));
const vite_1 = require("vite");
+const logger_1 = require("../../utils/logger");
const docgen_1 = require("../docgen");
const build_1 = require("../utils/build");
const html_1 = require("../utils/html");
const module_1 = require("../utils/module");
const schema_loader_1 = require("../utils/schema-loader");
const string_1 = require("../utils/string");
+const vite_logger_1 = require("../utils/vite-logger");
/**
* Constants for the SSR render plugin
*/
@@ -66,7 +68,7 @@ function loadComponentSchemaAndModule(componentData, componentPath, handoff) {
}
}
catch (error) {
- console.warn(`Failed to load component file ${componentPath}:`, error);
+ logger_1.Logger.warn(`Failed to load component file "${componentPath}": ${error}`);
}
}
// Step 3: Load component for rendering (if not already loaded)
@@ -76,7 +78,7 @@ function loadComponentSchemaAndModule(componentData, componentPath, handoff) {
component = moduleExports.exports.default;
}
catch (error) {
- console.error(`Failed to load component for rendering: ${componentPath}`, error);
+ logger_1.Logger.error(`Failed to load component for rendering "${componentPath}":`, error);
return [null, null];
}
}
@@ -138,8 +140,11 @@ function ssrRenderPlugin(componentData, documentationComponents, handoff) {
return {
name: PLUGIN_CONSTANTS.PLUGIN_NAME,
apply: 'build',
+ config: () => ({
+ customLogger: (0, vite_logger_1.createViteLogger)(),
+ }),
resolveId(resolveId) {
- console.log('resolveId', resolveId);
+ logger_1.Logger.debug('resolveId', resolveId);
if (resolveId === PLUGIN_CONSTANTS.SCRIPT_ID) {
return resolveId;
}
@@ -164,7 +169,7 @@ function ssrRenderPlugin(componentData, documentationComponents, handoff) {
// Load component schema and module
const [schemaProperties, ReactComponent] = yield loadComponentSchemaAndModule(componentData, componentPath, handoff);
if (!ReactComponent) {
- console.error(`Failed to load React component for ${componentId}`);
+ logger_1.Logger.error(`Failed to load React component for ${componentId}`);
return;
}
// Apply schema properties if found
@@ -202,7 +207,7 @@ function ssrRenderPlugin(componentData, documentationComponents, handoff) {
// Generate client-side hydration code
const clientHydrationSource = generateClientHydrationSource(componentPath);
// Build client-side bundle
- const clientBuildConfig = Object.assign(Object.assign({}, build_1.DEFAULT_CLIENT_BUILD_CONFIG), { stdin: {
+ const clientBuildConfig = Object.assign(Object.assign({}, build_1.DEFAULT_CLIENT_BUILD_CONFIG), { logLevel: 'silent', stdin: {
contents: clientHydrationSource,
resolveDir: process.cwd(),
loader: 'tsx',
@@ -211,8 +216,23 @@ function ssrRenderPlugin(componentData, documentationComponents, handoff) {
const finalClientBuildConfig = ((_b = (_a = handoff.config) === null || _a === void 0 ? void 0 : _a.hooks) === null || _b === void 0 ? void 0 : _b.clientBuildConfig)
? handoff.config.hooks.clientBuildConfig(clientBuildConfig)
: clientBuildConfig;
- const bundledClient = yield esbuild_1.default.build(finalClientBuildConfig);
- const clientBundleJs = bundledClient.outputFiles[0].text;
+ let clientBundleJs;
+ try {
+ const bundledClient = yield esbuild_1.default.build(finalClientBuildConfig);
+ if (bundledClient.warnings.length > 0) {
+ const messages = yield esbuild_1.default.formatMessages(bundledClient.warnings, { kind: 'warning', color: true });
+ messages.forEach((msg) => logger_1.Logger.warn(msg));
+ }
+ clientBundleJs = bundledClient.outputFiles[0].text;
+ }
+ catch (error) {
+ logger_1.Logger.error(`Failed to build client bundle for ${componentId}`);
+ if (error.errors) {
+ const messages = yield esbuild_1.default.formatMessages(error.errors, { kind: 'error', color: true });
+ messages.forEach((msg) => logger_1.Logger.error(msg));
+ }
+ continue;
+ }
// Generate complete HTML document
finalHtml = generateHtmlDocument(componentId, componentData.previews[previewKey].title, formattedHtml, clientBundleJs, previewProps);
// Emit preview files
diff --git a/dist/transformers/preview/component.js b/dist/transformers/preview/component.js
index bf7fda2a..afc99739 100644
--- a/dist/transformers/preview/component.js
+++ b/dist/transformers/preview/component.js
@@ -38,10 +38,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.getComponentOutputPath = exports.SlotType = void 0;
exports.componentTransformer = componentTransformer;
exports.processSharedStyles = processSharedStyles;
-const chalk_1 = __importDefault(require("chalk"));
const fs_extra_1 = __importDefault(require("fs-extra"));
const path_1 = __importDefault(require("path"));
const sass_1 = __importDefault(require("sass"));
+const logger_1 = require("../../utils/logger");
const api_1 = __importStar(require("./component/api"));
const builder_1 = __importDefault(require("./component/builder"));
const css_1 = require("./component/css");
@@ -88,7 +88,7 @@ function processSharedStyles(handoff) {
const scssPath = path_1.default.resolve(custom, 'shared.scss');
const cssPath = path_1.default.resolve(custom, 'shared.css');
if (fs_extra_1.default.existsSync(scssPath) && !fs_extra_1.default.existsSync(cssPath)) {
- console.log(chalk_1.default.green(`Compiling shared styles`));
+ logger_1.Logger.success(`Compiling shared styles`);
try {
const result = yield sass_1.default.compileAsync(scssPath, {
loadPaths: [
@@ -102,14 +102,13 @@ function processSharedStyles(handoff) {
// write the css to the public folder
const css = '/* These are the shared styles used in every component. */ \n\n' + result.css;
const cssPath = path_1.default.resolve(publicPath, 'shared.css');
- console.log(chalk_1.default.green(`Writing shared styles to ${cssPath}`));
+ logger_1.Logger.success(`Writing shared styles to ${cssPath}`);
yield fs_extra_1.default.writeFile(cssPath, result.css);
return css;
}
}
catch (e) {
- console.log(chalk_1.default.red(`Error compiling shared styles`));
- console.log(e);
+ logger_1.Logger.error(`Error compiling shared styles`, e);
}
}
else if (fs_extra_1.default.existsSync(cssPath)) {
diff --git a/dist/transformers/preview/component/api.d.ts b/dist/transformers/preview/component/api.d.ts
index edfe1f96..353c2335 100644
--- a/dist/transformers/preview/component/api.d.ts
+++ b/dist/transformers/preview/component/api.d.ts
@@ -15,4 +15,19 @@ export declare const writeComponentMetadataApi: (id: string, summary: ComponentL
* @param componentData
*/
export declare const updateComponentSummaryApi: (handoff: Handoff, componentData: ComponentListObject[], isFullRebuild?: boolean) => Promise;
+/**
+ * Read the component API data for a specific version
+ * @param handoff
+ * @param id
+ * @param version
+ * @returns
+ */
+export declare const readComponentApi: (handoff: Handoff, id: string, version: string) => Promise;
+/**
+ * Read the component metadata/summary (the {id}.json file)
+ * @param handoff
+ * @param id
+ * @returns The component summary or null if not found
+ */
+export declare const readComponentMetadataApi: (handoff: Handoff, id: string) => Promise;
export default writeComponentSummaryAPI;
diff --git a/dist/transformers/preview/component/api.js b/dist/transformers/preview/component/api.js
index d4f50207..95b1f92d 100644
--- a/dist/transformers/preview/component/api.js
+++ b/dist/transformers/preview/component/api.js
@@ -12,7 +12,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.updateComponentSummaryApi = exports.writeComponentMetadataApi = exports.writeComponentApi = exports.getAPIPath = void 0;
+exports.readComponentMetadataApi = exports.readComponentApi = exports.updateComponentSummaryApi = exports.writeComponentMetadataApi = exports.writeComponentApi = exports.getAPIPath = void 0;
const fs_extra_1 = __importDefault(require("fs-extra"));
const path_1 = __importDefault(require("path"));
/**
@@ -124,4 +124,50 @@ const updateComponentSummaryApi = (handoff_1, componentData_1, ...args_1) => __a
yield writeComponentSummaryAPI(handoff, merged);
});
exports.updateComponentSummaryApi = updateComponentSummaryApi;
+/**
+ * Read the component API data for a specific version
+ * @param handoff
+ * @param id
+ * @param version
+ * @returns
+ */
+const readComponentApi = (handoff, id, version) => __awaiter(void 0, void 0, void 0, function* () {
+ const outputDirPath = path_1.default.resolve((0, exports.getAPIPath)(handoff), 'component', id);
+ const outputFilePath = path_1.default.resolve(outputDirPath, `${version}.json`);
+ if (fs_extra_1.default.existsSync(outputFilePath)) {
+ try {
+ const existingJson = yield fs_extra_1.default.readFile(outputFilePath, 'utf8');
+ if (existingJson) {
+ return JSON.parse(existingJson);
+ }
+ }
+ catch (_) {
+ // Unable to parse existing file
+ }
+ }
+ return null;
+});
+exports.readComponentApi = readComponentApi;
+/**
+ * Read the component metadata/summary (the {id}.json file)
+ * @param handoff
+ * @param id
+ * @returns The component summary or null if not found
+ */
+const readComponentMetadataApi = (handoff, id) => __awaiter(void 0, void 0, void 0, function* () {
+ const outputFilePath = path_1.default.resolve((0, exports.getAPIPath)(handoff), 'component', `${id}.json`);
+ if (fs_extra_1.default.existsSync(outputFilePath)) {
+ try {
+ const existingJson = yield fs_extra_1.default.readFile(outputFilePath, 'utf8');
+ if (existingJson) {
+ return JSON.parse(existingJson);
+ }
+ }
+ catch (_) {
+ // Unable to parse existing file
+ }
+ }
+ return null;
+});
+exports.readComponentMetadataApi = readComponentMetadataApi;
exports.default = writeComponentSummaryAPI;
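readComponentApi and readComponentMetadataApi are the read-side counterparts of writeComponentApi and writeComponentMetadataApi, and builder.js (below) uses them to carry forward previously generated segments and summaries. Illustrative usage, assuming the caller sits next to api.js; the 'button' id and '1.0.0' version are only examples:

    const { readComponentApi, readComponentMetadataApi } = require('./api');

    async function loadExistingOutput(handoff) {
        // Parsed component/button/1.0.0.json under the API output path, or null.
        const existingVersion = await readComponentApi(handoff, 'button', '1.0.0');
        // Parsed component/button.json summary, or null.
        const existingSummary = await readComponentMetadataApi(handoff, 'button');
        return { existingVersion, existingSummary };
    }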
diff --git a/dist/transformers/preview/component/builder.d.ts b/dist/transformers/preview/component/builder.d.ts
index 6f85c33c..669f0d7c 100644
--- a/dist/transformers/preview/component/builder.d.ts
+++ b/dist/transformers/preview/component/builder.d.ts
@@ -7,15 +7,22 @@ export declare enum ComponentSegment {
JavaScript = "javascript",
Style = "style",
Previews = "previews",
- Validation = "validation",
- ValidationOnly = "validation-only"
+ Validation = "validation"
+}
+/**
+ * Options for processing components
+ */
+export interface ProcessComponentsOptions {
+ /** Enable caching to skip unchanged components */
+ useCache?: boolean;
}
/**
* Process components and generate their code, styles, and previews
* @param handoff - The Handoff instance containing configuration and state
* @param id - Optional component ID to process a specific component
* @param segmentToProcess - Optional segment to update
+ * @param options - Optional processing options including cache settings
* @returns Promise resolving to an array of processed components
*/
-export declare function processComponents(handoff: Handoff, id?: string, segmentToProcess?: ComponentSegment): Promise;
+export declare function processComponents(handoff: Handoff, id?: string, segmentToProcess?: ComponentSegment, options?: ProcessComponentsOptions): Promise;
export default processComponents;
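The widened processComponents signature keeps existing call patterns intact while making caching opt-in. Hypothetical callers (the 'button' id is only an example):

    const processComponents = require('./builder').default;
    const { ComponentSegment } = require('./builder');

    async function rebuild(handoff) {
        // Full build with caching: components whose sources, outputs and global
        // dependencies are unchanged are skipped, and their existing summaries
        // are re-read so components.json stays complete.
        await processComponents(handoff, undefined, undefined, { useCache: true });

        // Targeted rebuild of one segment for one component; the cache is
        // bypassed whenever an explicit id or segment is supplied.
        await processComponents(handoff, 'button', ComponentSegment.Previews);
    }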
diff --git a/dist/transformers/preview/component/builder.js b/dist/transformers/preview/component/builder.js
index c5b67bb2..f95c5c51 100644
--- a/dist/transformers/preview/component/builder.js
+++ b/dist/transformers/preview/component/builder.js
@@ -26,6 +26,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.ComponentSegment = void 0;
exports.processComponents = processComponents;
const cloneDeep_1 = __importDefault(require("lodash/cloneDeep"));
+const cache_1 = require("../../../cache");
+const logger_1 = require("../../../utils/logger");
const schema_1 = require("../../utils/schema");
const types_1 = require("../types");
const api_1 = require("./api");
@@ -70,100 +72,263 @@ var ComponentSegment;
ComponentSegment["Style"] = "style";
ComponentSegment["Previews"] = "previews";
ComponentSegment["Validation"] = "validation";
- ComponentSegment["ValidationOnly"] = "validation-only";
})(ComponentSegment || (exports.ComponentSegment = ComponentSegment = {}));
/**
- * Determines which keys should be preserved based on the segment being processed.
- * When processing a specific segment, we want to preserve data from other segments
- * to avoid overwriting them with undefined values.
+ * Returns a normalized build plan describing which component segments need rebuilding.
+ *
+ * The plan consolidates the conditional logic for:
+ * - Full builds (no segment specified) where every segment should be regenerated
+ * - Targeted rebuilds where only the requested segment runs
+ * - Validation sweeps that only rebuild segments with missing artifacts
+ *
+ * @param segmentToProcess Optional segment identifier coming from the caller
+ * @param existingData Previously persisted component output (if any)
*/
-function getPreserveKeysForSegment(segmentToProcess) {
- if (!segmentToProcess) {
- return []; // No preservation needed for full updates
- }
- switch (segmentToProcess) {
- case ComponentSegment.JavaScript:
- // When processing JavaScript segment, preserve CSS and previews data
- return ['css', 'sass', 'sharedStyles', 'previews', 'validations'];
- case ComponentSegment.Style:
- // When processing Style segment, preserve JavaScript and previews data
- return ['js', 'jsCompiled', 'previews', 'validations'];
- case ComponentSegment.Previews:
- // When processing Previews segment, preserve JavaScript and CSS data
- return ['js', 'jsCompiled', 'css', 'sass', 'sharedStyles', 'validations'];
- case ComponentSegment.Validation:
- // When processing Validation segment, preserve all other data
- return ['js', 'jsCompiled', 'css', 'sass', 'sharedStyles', 'previews'];
- case ComponentSegment.ValidationOnly:
- // When processing ValidationOnly segment, preserve only validation data
- return ['js', 'jsCompiled', 'css', 'sass', 'sharedStyles', 'previews'];
- default:
- return [];
- }
-}
+const createComponentBuildPlan = (segmentToProcess, existingData) => {
+ const isValidationMode = segmentToProcess === ComponentSegment.Validation;
+ const isFullBuild = !segmentToProcess;
+ const previewsMissing = !(existingData === null || existingData === void 0 ? void 0 : existingData.code) || Object.values((existingData === null || existingData === void 0 ? void 0 : existingData.previews) || {}).some((preview) => !(preview === null || preview === void 0 ? void 0 : preview.url));
+ return {
+ js: isFullBuild || segmentToProcess === ComponentSegment.JavaScript || (isValidationMode && !(existingData === null || existingData === void 0 ? void 0 : existingData.js)),
+ css: isFullBuild || segmentToProcess === ComponentSegment.Style || (isValidationMode && !(existingData === null || existingData === void 0 ? void 0 : existingData.css)),
+ previews: isFullBuild || segmentToProcess === ComponentSegment.Previews || (isValidationMode && previewsMissing),
+ validationMode: isValidationMode,
+ };
+};
/**
* Process components and generate their code, styles, and previews
* @param handoff - The Handoff instance containing configuration and state
* @param id - Optional component ID to process a specific component
* @param segmentToProcess - Optional segment to update
+ * @param options - Optional processing options including cache settings
* @returns Promise resolving to an array of processed components
*/
-function processComponents(handoff, id, segmentToProcess) {
+function processComponents(handoff, id, segmentToProcess, options) {
return __awaiter(this, void 0, void 0, function* () {
- var _a, _b, _c, _d;
+ var _a, _b, _c, _d, _e, _f;
const result = [];
const documentationObject = yield handoff.getDocumentationObject();
const components = (_a = documentationObject === null || documentationObject === void 0 ? void 0 : documentationObject.components) !== null && _a !== void 0 ? _a : {};
const sharedStyles = yield handoff.getSharedStyles();
const runtimeComponents = (_d = (_c = (_b = handoff.runtimeConfig) === null || _b === void 0 ? void 0 : _b.entries) === null || _c === void 0 ? void 0 : _c.components) !== null && _d !== void 0 ? _d : {};
- // Determine which keys to preserve based on the segment being processed
- // This ensures that when processing only specific segments (e.g., JavaScript only),
- // we don't overwrite data from other segments (e.g., CSS, previews) with undefined values
- const preserveKeys = getPreserveKeysForSegment(segmentToProcess);
- for (const runtimeComponentId of Object.keys(runtimeComponents)) {
+ const allComponentIds = Object.keys(runtimeComponents);
+ // Determine which components need building based on cache (when enabled)
+ let componentsToBuild;
+ let cache = null;
+ let currentGlobalDeps = {};
+ const componentFileStatesMap = new Map();
+ // Only use caching when:
+ // - useCache option is enabled
+ // - No specific component ID is requested (full build scenario)
+ // - No specific segment is requested (full build scenario)
+ // - Force flag is not set
+ const shouldUseCache = (options === null || options === void 0 ? void 0 : options.useCache) && !id && !segmentToProcess && !handoff.force;
+ if (shouldUseCache) {
+ logger_1.Logger.debug('Loading build cache...');
+ cache = yield (0, cache_1.loadBuildCache)(handoff);
+ currentGlobalDeps = yield (0, cache_1.computeGlobalDepsState)(handoff);
+ const globalDepsChanged = (0, cache_1.haveGlobalDepsChanged)(cache === null || cache === void 0 ? void 0 : cache.globalDeps, currentGlobalDeps);
+ if (globalDepsChanged) {
+ logger_1.Logger.info('Global dependencies changed, rebuilding all components');
+ componentsToBuild = new Set(allComponentIds);
+ }
+ else {
+ logger_1.Logger.debug('Global dependencies unchanged');
+ componentsToBuild = new Set();
+ // Evaluate each component independently
+ for (const componentId of allComponentIds) {
+ const versions = Object.keys(runtimeComponents[componentId]);
+ let needsBuild = false;
+ // Store file states for later cache update
+ const versionStatesMap = new Map();
+ componentFileStatesMap.set(componentId, versionStatesMap);
+ for (const version of versions) {
+ const currentFileStates = yield (0, cache_1.computeComponentFileStates)(handoff, componentId, version);
+ versionStatesMap.set(version, currentFileStates);
+ const cachedEntry = (_f = (_e = cache === null || cache === void 0 ? void 0 : cache.components) === null || _e === void 0 ? void 0 : _e[componentId]) === null || _f === void 0 ? void 0 : _f[version];
+ if (!cachedEntry) {
+ logger_1.Logger.info(`Component '${componentId}@${version}': new component, will build`);
+ needsBuild = true;
+ }
+ else if ((0, cache_1.hasComponentChanged)(cachedEntry, currentFileStates)) {
+ logger_1.Logger.info(`Component '${componentId}@${version}': source files changed, will rebuild`);
+ needsBuild = true;
+ }
+ else if (!(yield (0, cache_1.checkOutputExists)(handoff, componentId, version))) {
+ logger_1.Logger.info(`Component '${componentId}@${version}': output missing, will rebuild`);
+ needsBuild = true;
+ }
+ }
+ if (needsBuild) {
+ componentsToBuild.add(componentId);
+ }
+ else {
+ logger_1.Logger.info(`Component '${componentId}': unchanged, skipping`);
+ }
+ }
+ }
+ // Prune removed components from cache
+ if (cache) {
+ (0, cache_1.pruneRemovedComponents)(cache, allComponentIds);
+ }
+ const skippedCount = allComponentIds.length - componentsToBuild.size;
+ if (skippedCount > 0) {
+ logger_1.Logger.info(`Building ${componentsToBuild.size} of ${allComponentIds.length} components (${skippedCount} unchanged)`);
+ }
+ else if (componentsToBuild.size > 0) {
+ logger_1.Logger.info(`Building all ${componentsToBuild.size} components`);
+ }
+ else {
+ logger_1.Logger.info('All components up to date, nothing to build');
+ }
+ }
+ else {
+ // No caching - build all requested components
+ componentsToBuild = new Set(allComponentIds);
+ }
+ for (const runtimeComponentId of allComponentIds) {
+ // Skip if specific ID requested and doesn't match
if (!!id && runtimeComponentId !== id) {
continue;
}
+ // Skip if caching is enabled and this component doesn't need building
+ if (shouldUseCache && !componentsToBuild.has(runtimeComponentId)) {
+ // Even though we're skipping the build, we need to include this component's
+ // existing summary in the result to prevent data loss in components.json
+ const existingSummary = yield (0, api_1.readComponentMetadataApi)(handoff, runtimeComponentId);
+ if (existingSummary) {
+ result.push(existingSummary);
+ }
+ continue;
+ }
const versions = Object.keys(runtimeComponents[runtimeComponentId]);
const latest = (0, versions_1.getLatestVersionForComponent)(versions);
let latestVersion;
yield Promise.all(versions.map((version) => __awaiter(this, void 0, void 0, function* () {
var _a, _b;
+ // Select the current component metadata from the runtime config for this id/version.
+ // Separate out `type` to enforce/rewrite it during build.
const runtimeComponent = runtimeComponents[runtimeComponentId][version];
const { type } = runtimeComponent, restMetadata = __rest(runtimeComponent, ["type"]);
+ // Attempt to load any existing persisted component output (previous build for this id/version).
+ // This is used for incremental/partial rebuilds to retain previously generated segments when not rebuilding all.
+ const existingData = yield (0, api_1.readComponentApi)(handoff, runtimeComponentId, version);
+ // Compose the base in-memory data for building this component:
+ // - Start from a deep clone of the defaultComponent (to avoid mutation bugs)
+ // - Merge in metadata from the current runtime configuration (from config/docs)
+ // - Explicitly set `type` (defaults to Element if not provided)
let data = Object.assign(Object.assign(Object.assign({}, (0, cloneDeep_1.default)(defaultComponent)), restMetadata), { type: type || types_1.ComponentType.Element });
- if (!segmentToProcess || segmentToProcess === ComponentSegment.JavaScript || segmentToProcess === ComponentSegment.Validation) {
+ // buildPlan captures which segments need work for this run.
+ const buildPlan = createComponentBuildPlan(segmentToProcess, existingData);
+ /**
+ * Merge segment data from existing version if this segment is *not* being rebuilt.
+ * This ensures that when only one segment (e.g., Javascript, CSS, Previews) is being updated,
+ * other fields retain their previous values. This avoids unnecessary overwrites or data loss
+ * when doing segmented or partial builds.
+ */
+ if (existingData) {
+ // If we're not building JS, carry forward the previous JS output.
+ if (!buildPlan.js) {
+ data.js = existingData.js;
+ }
+ // If we're not building CSS/Sass, keep the earlier CSS and Sass outputs.
+ if (!buildPlan.css) {
+ data.css = existingData.css;
+ data.sass = existingData.sass;
+ }
+ // If we're not building previews, preserve pre-existing HTML, code snippet, and previews.
+ if (!buildPlan.previews) {
+ data.html = existingData.html;
+ data.code = existingData.code;
+ data.previews = existingData.previews;
+ }
+ /**
+ * Always keep validation results from the previous data,
+ * unless this run is specifically doing a validation update.
+ * This keeps validations current without unnecessary recomputation or accidental removal.
+ */
+ if (!buildPlan.validationMode) {
+ data.validations = existingData.validations;
+ }
+ }
+            // Build JS if needed (full build, explicit JavaScript segment request, or validation sweep with missing JS output).
+ if (buildPlan.js) {
data = yield (0, javascript_1.default)(data, handoff);
}
- if (!segmentToProcess || segmentToProcess === ComponentSegment.Style || segmentToProcess === ComponentSegment.Validation) {
+ // Build CSS if needed.
+ if (buildPlan.css) {
data = yield (0, css_1.default)(data, handoff, sharedStyles);
}
- if (!segmentToProcess || segmentToProcess === ComponentSegment.Previews || segmentToProcess === ComponentSegment.Validation) {
+ // Build previews (HTML, snapshots, etc) if needed.
+ if (buildPlan.previews) {
data = yield (0, html_1.default)(data, handoff, components);
}
- if ((segmentToProcess === ComponentSegment.Validation || segmentToProcess === ComponentSegment.ValidationOnly)
- && ((_b = (_a = handoff.config) === null || _a === void 0 ? void 0 : _a.hooks) === null || _b === void 0 ? void 0 : _b.validateComponent)) {
+ /**
+ * Run validation if explicitly requested and a hook is configured.
+ * This allows custom logic to assess the validity of the generated component data.
+ */
+ if (buildPlan.validationMode && ((_b = (_a = handoff.config) === null || _a === void 0 ? void 0 : _a.hooks) === null || _b === void 0 ? void 0 : _b.validateComponent)) {
const validationResults = yield handoff.config.hooks.validateComponent(data);
data.validations = validationResults;
}
+ // Attach the resolved sharedStyles to the component data for persistence and downstream usage.
data.sharedStyles = sharedStyles;
- // recurse through all properties and ensure that every property has an id
+ // Ensure that every property within the properties array/object contains an 'id' field.
+ // This guarantees unique identification for property entries, which is useful for updates and API consumers.
data.properties = (0, schema_1.ensureIds)(data.properties);
- yield (0, api_1.writeComponentApi)(runtimeComponentId, data, version, handoff, preserveKeys);
+ // Write the updated component data to the corresponding API file (by component ID and version) for external access and caching.
+ yield (0, api_1.writeComponentApi)(runtimeComponentId, data, version, handoff, []);
+ // Store the latest version's full data for potential summary writing after all versions are processed.
if (version === latest) {
latestVersion = data;
}
})));
+ /**
+ * After processing all requested versions for this component:
+ * - If a latestVersion was produced, write a 'latest.json' API file for the component (points to the most recent/primary version).
+ * - Build a summary object for this component and write it to its summary API file.
+ * - Add the summary to the global result list for summary/index construction.
+ * If no version could be processed for this component, throw an error.
+ */
if (latestVersion) {
- yield (0, api_1.writeComponentApi)(runtimeComponentId, latestVersion, 'latest', handoff, preserveKeys);
+ // Write the 'latest.json' snapshot for quick access to the most up-to-date version.
+ yield (0, api_1.writeComponentApi)(runtimeComponentId, latestVersion, 'latest', handoff, []);
+ // Build the summary metadata for this component (includes all versions, properties, previews, etc).
const summary = buildComponentSummary(runtimeComponentId, latestVersion, versions);
+ // Store the summary as a per-component JSON file for documentation or API use.
yield (0, api_1.writeComponentMetadataApi)(runtimeComponentId, summary, handoff);
+ // Add to the cumulative results, to later update the global components summary file.
result.push(summary);
+ // Update cache entries for this component after successful build
+ if (shouldUseCache) {
+ if (!cache) {
+ cache = (0, cache_1.createEmptyCache)();
+ }
+ const versionStatesMap = componentFileStatesMap.get(runtimeComponentId);
+ if (versionStatesMap) {
+ for (const [version, fileStates] of Array.from(versionStatesMap)) {
+ (0, cache_1.updateComponentCacheEntry)(cache, runtimeComponentId, version, fileStates);
+ }
+ }
+ else {
+ // Compute file states if not already computed (e.g., when global deps changed)
+ for (const version of versions) {
+ const fileStates = yield (0, cache_1.computeComponentFileStates)(handoff, runtimeComponentId, version);
+ (0, cache_1.updateComponentCacheEntry)(cache, runtimeComponentId, version, fileStates);
+ }
+ }
+ }
}
else {
+ // Defensive: Throw a clear error if somehow no version was processed for this component.
throw new Error(`No latest version found for ${runtimeComponentId}`);
}
}
+ // Save the updated cache
+ if (shouldUseCache && cache) {
+ cache.globalDeps = currentGlobalDeps;
+ yield (0, cache_1.saveBuildCache)(handoff, cache);
+ }
// Always merge and write summary file, even if no components processed
const isFullRebuild = !id;
yield (0, api_1.updateComponentSummaryApi)(handoff, result, isFullRebuild);
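For reference, the plans produced by the internal createComponentBuildPlan helper introduced at the top of this hunk work out as follows (illustrative values derived from the logic above; the helper is module-private, so this is not runnable as-is):

    // Full build: no segment requested, so every segment is regenerated.
    //   createComponentBuildPlan(undefined, null)
    //     -> { js: true, css: true, previews: true, validationMode: false }

    // Targeted style rebuild: only CSS runs; js, previews and validations are
    // carried forward from existingData by the merge block above.
    //   createComponentBuildPlan(ComponentSegment.Style, existingData)
    //     -> { js: false, css: true, previews: false, validationMode: false }

    // Validation sweep over a complete previous build: nothing is regenerated
    // and only the validateComponent hook re-runs. A segment is rebuilt here
    // only if its previous output (js, css, or a preview url) is missing.
    //   createComponentBuildPlan(ComponentSegment.Validation,
    //     { js: '...', css: '...', code: '...', previews: { default: { url: 'button-default.html' } } })
    //     -> { js: false, css: false, previews: false, validationMode: true }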
diff --git a/dist/transformers/preview/component/css.js b/dist/transformers/preview/component/css.js
index 6d9d6fa6..a3c5fb53 100644
--- a/dist/transformers/preview/component/css.js
+++ b/dist/transformers/preview/component/css.js
@@ -13,11 +13,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildMainCss = void 0;
-const chalk_1 = __importDefault(require("chalk"));
const fs_extra_1 = __importDefault(require("fs-extra"));
const path_1 = __importDefault(require("path"));
const vite_1 = require("vite");
const index_1 = require("../../../index");
+const logger_1 = require("../../../utils/logger");
const config_1 = __importDefault(require("../../config"));
const component_1 = require("../component");
const { pathToFileURL } = require('url');
@@ -73,7 +73,7 @@ const buildCssBundle = (_a) => __awaiter(void 0, [_a], void 0, function* ({ entr
yield (0, vite_1.build)(viteConfig);
}
catch (e) {
- console.log(chalk_1.default.red(`Error building CSS for ${entry}`));
+ logger_1.Logger.error(`Failed to build CSS for "${entry}"`);
throw e;
}
finally {
@@ -89,7 +89,7 @@ const buildCssBundle = (_a) => __awaiter(void 0, [_a], void 0, function* ({ entr
const buildComponentCss = (data, handoff, sharedStyles) => __awaiter(void 0, void 0, void 0, function* () {
var _a, _b, _c;
const id = data.id;
- console.log('buildComponentCss ------------------------------', id);
+ logger_1.Logger.debug(`buildComponentCss`, id);
const entry = (_a = data.entries) === null || _a === void 0 ? void 0 : _a.scss;
if (!entry) {
return data;
@@ -144,7 +144,7 @@ const buildComponentCss = (data, handoff, sharedStyles) => __awaiter(void 0, voi
}
}
catch (e) {
- console.log(chalk_1.default.red(`Error building CSS for ${id}`));
+ logger_1.Logger.error(`Failed to build CSS for "${id}"`);
throw e;
}
return data;
@@ -160,7 +160,7 @@ const buildMainCss = (handoff) => __awaiter(void 0, void 0, void 0, function* ()
const stat = yield fs_extra_1.default.stat(runtimeConfig.entries.scss);
const entryPath = stat.isDirectory() ? path_1.default.resolve(runtimeConfig.entries.scss, 'main.scss') : runtimeConfig.entries.scss;
if (entryPath === runtimeConfig.entries.scss || fs_extra_1.default.existsSync(entryPath)) {
- console.log(chalk_1.default.green(`Building main CSS file`));
+ logger_1.Logger.success(`Building main CSS file...`);
try {
// Setup SASS load paths
const loadPaths = [
@@ -180,8 +180,7 @@ const buildMainCss = (handoff) => __awaiter(void 0, void 0, void 0, function* ()
});
}
catch (e) {
- console.log(chalk_1.default.red(`Error building main CSS`));
- console.log(e);
+ logger_1.Logger.error(`Failed to build main CSS:`, e);
}
}
}
diff --git a/dist/transformers/preview/component/html.js b/dist/transformers/preview/component/html.js
index b83281e3..4c487f15 100644
--- a/dist/transformers/preview/component/html.js
+++ b/dist/transformers/preview/component/html.js
@@ -15,6 +15,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.buildPreviews = void 0;
const plugin_react_1 = __importDefault(require("@vitejs/plugin-react"));
const vite_1 = require("vite");
+const logger_1 = require("../../../utils/logger");
const config_1 = __importDefault(require("../../config"));
const plugins_1 = require("../../plugins");
const component_1 = require("../component");
@@ -63,7 +64,7 @@ const buildPreviews = (data, handoff, components) => __awaiter(void 0, void 0, v
yield (0, vite_1.build)(viteConfig);
}
catch (error) {
- console.error(`Error building component previews: ${data.entries.template}`, error);
+ logger_1.Logger.error(`Error building component previews: ${data.entries.template}`, error);
}
finally {
// Restore the original NODE_ENV value after vite build completes
diff --git a/dist/transformers/preview/component/javascript.js b/dist/transformers/preview/component/javascript.js
index f244ddcd..9d08d8b0 100644
--- a/dist/transformers/preview/component/javascript.js
+++ b/dist/transformers/preview/component/javascript.js
@@ -13,11 +13,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildMainJS = exports.buildComponentJs = void 0;
-const chalk_1 = __importDefault(require("chalk"));
const fs_extra_1 = __importDefault(require("fs-extra"));
const path_1 = __importDefault(require("path"));
const vite_1 = require("vite");
const index_1 = require("../../../index");
+const logger_1 = require("../../../utils/logger");
const config_1 = __importDefault(require("../../config"));
const component_1 = require("../component");
/**
@@ -51,7 +51,7 @@ const buildJsBundle = (_a, handoff_1) => __awaiter(void 0, [_a, handoff_1], void
yield (0, vite_1.build)(viteConfig);
}
catch (e) {
- console.error(chalk_1.default.red(`Error building ${outputFilename}`), e);
+ logger_1.Logger.error(`Failed to build JS for "${outputFilename}":`, e);
}
finally {
// Restore the original NODE_ENV value after vite build completes
@@ -93,7 +93,7 @@ const buildComponentJs = (data, handoff) => __awaiter(void 0, void 0, void 0, fu
data['jsCompiled'] = compiled;
}
catch (e) {
- console.error(`[Component JS Build Error] ${id}:`, e);
+ logger_1.Logger.error(`JS build failed for component "${id}":`, e);
}
return data;
});
diff --git a/dist/transformers/preview/types.d.ts b/dist/transformers/preview/types.d.ts
index 09b13df0..438319a4 100644
--- a/dist/transformers/preview/types.d.ts
+++ b/dist/transformers/preview/types.d.ts
@@ -90,8 +90,8 @@ export type ComponentObject = {
js?: string;
/** Optional path to the main SCSS/CSS file (if available) */
scss?: string;
- /** Optional path(s) to component template file(s) (if available) */
- templates?: string;
+ /** Optional path to component template file (if available) */
+ template?: string;
};
/** Schema describing the expected properties (props/slots) for the component */
properties: {
diff --git a/dist/transformers/utils/handlebars.js b/dist/transformers/utils/handlebars.js
index 80197919..4dd4f89f 100644
--- a/dist/transformers/utils/handlebars.js
+++ b/dist/transformers/utils/handlebars.js
@@ -5,6 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.createHandlebarsContext = exports.registerHandlebarsHelpers = void 0;
const handlebars_1 = __importDefault(require("handlebars"));
+const logger_1 = require("../../utils/logger");
/**
* Registers common Handlebars helpers
* @param data - Component data containing properties
@@ -15,7 +16,7 @@ const registerHandlebarsHelpers = (data, injectFieldWrappers) => {
handlebars_1.default.registerHelper('field', function (field, options) {
if (injectFieldWrappers) {
if (!field) {
- console.error(`Missing field declaration for ${data.id}`);
+ logger_1.Logger.error(`Missing field declaration for ${data.id}`);
return options.fn(this);
}
let parts = field.split('.');
@@ -28,7 +29,7 @@ const registerHandlebarsHelpers = (data, injectFieldWrappers) => {
current = current === null || current === void 0 ? void 0 : current[part];
}
if (!current) {
- console.error(`Undefined field path for ${data.id}`);
+ logger_1.Logger.error(`Undefined field path for ${data.id}`);
return options.fn(this);
}
return new handlebars_1.default.SafeString(`${options.fn(this)}`);
diff --git a/dist/transformers/utils/schema-loader.js b/dist/transformers/utils/schema-loader.js
index d89f25c3..57bcb1b8 100644
--- a/dist/transformers/utils/schema-loader.js
+++ b/dist/transformers/utils/schema-loader.js
@@ -14,6 +14,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.loadSchemaFromComponent = exports.loadSchemaFromFile = void 0;
const path_1 = __importDefault(require("path"));
+const logger_1 = require("../../utils/logger");
const docgen_1 = require("../docgen");
const module_1 = require("./module");
const schema_1 = require("./schema");
@@ -27,7 +28,7 @@ const loadSchemaFromFile = (schemaPath, handoff) => __awaiter(void 0, void 0, vo
var _a, _b;
const ext = path_1.default.extname(schemaPath);
if (ext !== '.ts' && ext !== '.tsx') {
- console.warn(`Schema file has unsupported extension: ${ext}`);
+ logger_1.Logger.warn(`Unsupported schema file extension: ${ext}`);
return null;
}
try {
@@ -48,7 +49,7 @@ const loadSchemaFromFile = (schemaPath, handoff) => __awaiter(void 0, void 0, vo
return null;
}
catch (error) {
- console.warn(`Failed to load separate schema file ${schemaPath}:`, error);
+ logger_1.Logger.warn(`Failed to load schema file "${schemaPath}": ${error}`);
return null;
}
});
diff --git a/dist/transformers/utils/schema.js b/dist/transformers/utils/schema.js
index 1e927bac..819560e3 100644
--- a/dist/transformers/utils/schema.js
+++ b/dist/transformers/utils/schema.js
@@ -1,6 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.loadSchemaFromExports = exports.isValidSchemaObject = exports.convertDocgenToProperties = exports.ensureIds = void 0;
+const logger_1 = require("../../utils/logger");
const component_1 = require("../preview/component");
/**
* Ensures all properties have proper IDs assigned recursively
@@ -94,7 +95,7 @@ const loadSchemaFromExports = (moduleExports, handoff, exportKey = 'default') =>
return schema;
}
catch (error) {
- console.warn(`Failed to load schema from exports (${exportKey}):`, error);
+ logger_1.Logger.warn(`Failed to load schema from exports (${exportKey}): ${error}`);
return null;
}
};
diff --git a/dist/transformers/utils/vite-logger.d.ts b/dist/transformers/utils/vite-logger.d.ts
new file mode 100644
index 00000000..b5a40fc8
--- /dev/null
+++ b/dist/transformers/utils/vite-logger.d.ts
@@ -0,0 +1,2 @@
+import { Logger as ViteLogger } from 'vite';
+export declare const createViteLogger: () => ViteLogger;
diff --git a/dist/transformers/utils/vite-logger.js b/dist/transformers/utils/vite-logger.js
new file mode 100644
index 00000000..9fccf5e0
--- /dev/null
+++ b/dist/transformers/utils/vite-logger.js
@@ -0,0 +1,34 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createViteLogger = void 0;
+const logger_1 = require("../../utils/logger");
+const createViteLogger = () => {
+ const warnedMessages = new Set();
+ return {
+ hasWarned: false,
+ info(msg, options) {
+ logger_1.Logger.info(msg);
+ },
+ warn(msg, options) {
+ this.hasWarned = true;
+ logger_1.Logger.warn(msg);
+ },
+ warnOnce(msg, options) {
+ if (warnedMessages.has(msg))
+ return;
+ warnedMessages.add(msg);
+ this.hasWarned = true;
+ logger_1.Logger.warn(msg);
+ },
+ error(msg, options) {
+ logger_1.Logger.error(msg);
+ },
+ clearScreen(type) {
+ // No-op to preserve terminal history
+ },
+ hasErrorLogged(error) {
+ return false;
+ },
+ };
+};
+exports.createViteLogger = createViteLogger;
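
The adapter above fills in Vite's Logger interface so Vite's own output flows through the shared Logger. A minimal sketch of how it would be plugged into a programmatic build; it assumes Vite's `customLogger` option on the inline config, and the entry and output paths are purely illustrative:

    import { build } from 'vite';
    import { createViteLogger } from './transformers/utils/vite-logger'; // illustrative src path

    const exampleBuild = async () => {
      await build({
        logLevel: 'info',
        clearScreen: false,               // keep terminal history, matching the no-op clearScreen above
        customLogger: createViteLogger(), // warnings/errors are timestamped; warnOnce deduplicates repeats
        build: {
          rollupOptions: { input: 'src/example-entry.ts' }, // illustrative entry
          outDir: 'dist-example',                           // illustrative output dir
        },
      });
    };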
diff --git a/dist/utils/filter.js b/dist/utils/filter.js
index e22591fb..3db7975e 100644
--- a/dist/utils/filter.js
+++ b/dist/utils/filter.js
@@ -54,9 +54,6 @@ function evaluateFilter(obj, filter) {
function evaluateFieldFilter(obj, filter) {
const { field, op, value } = filter;
const actual = obj[field];
- if (op === 'neq') {
- console.log('EVAL', filter, actual, actual !== value);
- }
switch (op) {
case 'eq':
return { matches: actual === value };
diff --git a/dist/utils/logger.d.ts b/dist/utils/logger.d.ts
new file mode 100644
index 00000000..2750fbce
--- /dev/null
+++ b/dist/utils/logger.d.ts
@@ -0,0 +1,13 @@
+export declare class Logger {
+ private static debugMode;
+ static init(options?: {
+ debug?: boolean;
+ }): void;
+ private static getTimestamp;
+ static log(message: string): void;
+ static info(message: string): void;
+ static success(message: string): void;
+ static warn(message: string): void;
+ static error(message: string, error?: any): void;
+ static debug(message: string, data?: any): void;
+}
diff --git a/dist/utils/logger.js b/dist/utils/logger.js
new file mode 100644
index 00000000..db455921
--- /dev/null
+++ b/dist/utils/logger.js
@@ -0,0 +1,46 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Logger = void 0;
+const chalk_1 = __importDefault(require("chalk"));
+class Logger {
+ static init(options) {
+ if ((options === null || options === void 0 ? void 0 : options.debug) !== undefined) {
+ this.debugMode = options.debug;
+ }
+ }
+ static getTimestamp() {
+ const now = new Date();
+ return chalk_1.default.gray(`[${now.toISOString()}]`);
+ }
+ static log(message) {
+ console.log(`${this.getTimestamp()} ${message}`);
+ }
+ static info(message) {
+ console.log(`${this.getTimestamp()} ${chalk_1.default.cyan(message)}`);
+ }
+ static success(message) {
+ console.log(`${this.getTimestamp()} ${chalk_1.default.green(message)}`);
+ }
+ static warn(message) {
+ console.warn(`${this.getTimestamp()} ${chalk_1.default.yellow(message)}`);
+ }
+ static error(message, error) {
+ console.error(`${this.getTimestamp()} ${chalk_1.default.red(message)}`);
+ if (error) {
+ console.error(error);
+ }
+ }
+ static debug(message, data) {
+ if (this.debugMode) {
+ console.log(`${this.getTimestamp()} ${chalk_1.default.gray(`[DEBUG] ${message}`)}`);
+ if (data) {
+ console.log(data);
+ }
+ }
+ }
+}
+exports.Logger = Logger;
+Logger.debugMode = false;
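
The Logger class above replaces the scattered chalk-wrapped console calls. A short usage sketch based on the API shown (the CLI-flag source for the debug switch is an assumption for illustration):

    import { Logger } from './utils/logger';

    // Turn on debug output, e.g. from a hypothetical --debug CLI flag.
    Logger.init({ debug: process.argv.includes('--debug') });

    Logger.info('Processing components...');             // cyan, timestamped
    Logger.success('Build finished');                     // green
    Logger.warn('tokens.json not found, writing {}');     // yellow
    Logger.error('CSS build failed', new Error('sass: file not found'));
    Logger.debug('Resolved entries', { scss: 'integration/main.scss' }); // printed only when debug mode is on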
diff --git a/src/app.ts b/src/app.ts
index f8020743..3f3bba4e 100644
--- a/src/app.ts
+++ b/src/app.ts
@@ -1,4 +1,3 @@
-import chalk from 'chalk';
import chokidar from 'chokidar';
import spawn from 'cross-spawn';
import fs from 'fs-extra';
@@ -12,11 +11,18 @@ import { getClientConfig } from './config';
import { buildComponents } from './pipeline';
import processComponents, { ComponentSegment } from './transformers/preview/component/builder';
import { ComponentListObject } from './transformers/preview/types';
+import { Logger } from './utils/logger';
-interface ExtWebSocket extends WebSocket {
+interface HandoffWebSocket extends WebSocket {
isAlive: boolean;
}
+interface WatcherState {
+ debounce: boolean;
+ runtimeComponentsWatcher: chokidar.FSWatcher | null;
+ runtimeConfigurationWatcher: chokidar.FSWatcher | null;
+}
+
/**
* Creates a WebSocket server that broadcasts messages to connected clients.
* Designed for development mode to help with hot-reloading.
@@ -28,25 +34,25 @@ const createWebSocketServer = async (port: number = 3001) => {
const wss = new WebSocket.Server({ port });
// Heartbeat function to mark a connection as alive.
- const heartbeat = function (this: ExtWebSocket) {
+ const heartbeat = function (this: HandoffWebSocket) {
this.isAlive = true;
};
// Setup a new connection
wss.on('connection', (ws) => {
- const extWs = ws as ExtWebSocket;
+ const extWs = ws as HandoffWebSocket;
extWs.isAlive = true;
extWs.send(JSON.stringify({ type: 'WELCOME' }));
- extWs.on('error', (error) => console.error('WebSocket error:', error));
+ extWs.on('error', (error) => Logger.error('WebSocket error:', error));
extWs.on('pong', heartbeat);
});
// Periodically ping clients to ensure they are still connected
const pingInterval = setInterval(() => {
wss.clients.forEach((client) => {
- const extWs = client as ExtWebSocket;
+ const extWs = client as HandoffWebSocket;
if (!extWs.isAlive) {
- console.log(chalk.yellow('Terminating inactive client'));
+ Logger.warn('Terminating inactive client');
return client.terminate();
}
extWs.isAlive = false;
@@ -59,11 +65,11 @@ const createWebSocketServer = async (port: number = 3001) => {
clearInterval(pingInterval);
});
- console.log(chalk.green(`WebSocket server started on ws://localhost:${port}`));
+ Logger.success(`WebSocket server listening on ws://localhost:${port}`);
// Return a function to broadcast a message to all connected clients
return (message: string) => {
- console.log(chalk.green(`Broadcasting message to ${wss.clients.size} client(s)`));
+ Logger.success(`Broadcasting message to ${wss.clients.size} client(s)`);
wss.clients.forEach((client) => {
if (client.readyState === WebSocket.OPEN) {
client.send(message);
@@ -104,11 +110,11 @@ const getAppPath = (handoff: Handoff): string => {
* Copy the public dir from the working dir to the module dir
* @param handoff
*/
-const mergePublicDir = async (handoff: Handoff): Promise<void> => {
+const syncPublicFiles = async (handoff: Handoff): Promise<void> => {
const appPath = getAppPath(handoff);
const workingPublicPath = getWorkingPublicPath(handoff);
if (workingPublicPath) {
- fs.copySync(workingPublicPath, path.resolve(appPath, 'public'), { overwrite: true });
+ await fs.copy(workingPublicPath, path.resolve(appPath, 'public'), { overwrite: true });
}
};
@@ -119,63 +125,74 @@ const mergePublicDir = async (handoff: Handoff): Promise => {
* @param handoff - The Handoff instance containing configuration and working paths
* @returns Promise that resolves when cleanup is complete
*/
-const performCleanup = async (handoff: Handoff): Promise<void> => {
+const cleanupAppDirectory = async (handoff: Handoff): Promise<void> => {
const appPath = getAppPath(handoff);
// Clean project app dir
if (fs.existsSync(appPath)) {
- await fs.rm(appPath, { recursive: true });
+ await fs.remove(appPath);
}
};
-const publishTokensApi = async (handoff: Handoff) => {
+/**
+ * Publishes the tokens API files to the public directory.
+ *
+ * @param handoff - The Handoff instance
+ */
+const generateTokensApi = async (handoff: Handoff) => {
const apiPath = path.resolve(path.join(handoff.workingPath, 'public/api'));
- if (!fs.existsSync(apiPath)) {
- fs.mkdirSync(apiPath, { recursive: true });
- }
+ await fs.ensureDir(apiPath);
const tokens = await handoff.getDocumentationObject();
// Early return if no tokens
if (!tokens) {
// Write empty tokens.json for API consistency
- fs.writeFileSync(path.join(apiPath, 'tokens.json'), JSON.stringify({}, null, 2));
+ await fs.writeJson(path.join(apiPath, 'tokens.json'), {}, { spaces: 2 });
return;
}
- fs.writeFileSync(path.join(apiPath, 'tokens.json'), JSON.stringify(tokens, null, 2));
+ await fs.writeJson(path.join(apiPath, 'tokens.json'), tokens, { spaces: 2 });
- if (!fs.existsSync(path.join(apiPath, 'tokens'))) {
- fs.mkdirSync(path.join(apiPath, 'tokens'), { recursive: true });
- }
+ const tokensDir = path.join(apiPath, 'tokens');
+ await fs.ensureDir(tokensDir);
// Only iterate if tokens has properties
if (tokens && typeof tokens === 'object') {
+ const promises: Promise<void>[] = [];
for (const type in tokens) {
if (type === 'timestamp' || !tokens[type] || typeof tokens[type] !== 'object') continue;
for (const group in tokens[type]) {
if (tokens[type][group]) {
- fs.writeFileSync(path.join(apiPath, 'tokens', `${group}.json`), JSON.stringify(tokens[type][group], null, 2));
+ promises.push(fs.writeJson(path.join(tokensDir, `${group}.json`), tokens[type][group], { spaces: 2 }));
}
}
}
+ await Promise.all(promises);
}
};
-const prepareProjectApp = async (handoff: Handoff): Promise<string> => {
+/**
+ * Prepares the project application by copying source files and configuring Next.js.
+ *
+ * @param handoff - The Handoff instance
+ * @returns The path to the prepared application directory
+ */
+const initializeProjectApp = async (handoff: Handoff): Promise<string> => {
const srcPath = path.resolve(handoff.modulePath, 'src', 'app');
const appPath = getAppPath(handoff);
// Publish tokens API
- publishTokensApi(handoff);
+ await generateTokensApi(handoff);
// Prepare project app dir
- await fs.promises.mkdir(appPath, { recursive: true });
+ await fs.ensureDir(appPath);
await fs.copy(srcPath, appPath, { overwrite: true });
- await mergePublicDir(handoff);
+ await syncPublicFiles(handoff);
// Prepare project app configuration
+ // Warning: Regex replacement is fragile and depends on exact formatting in next.config.mjs
const handoffProjectId = handoff.getProjectId();
const handoffAppBasePath = handoff.config.app.base_path ?? '';
const handoffWorkingPath = path.resolve(handoff.workingPath);
@@ -198,14 +215,265 @@ const prepareProjectApp = async (handoff: Handoff): Promise => {
return appPath;
};
-const persistRuntimeCache = (handoff: Handoff) => {
+/**
+ * Persists the client config to a JSON file.
+ *
+ * @param handoff - The Handoff instance
+ */
+const persistClientConfig = async (handoff: Handoff) => {
const appPath = getAppPath(handoff);
- const destination = path.resolve(appPath, 'runtime.cache.json');
+ const destination = path.resolve(appPath, 'client.config.json');
// Ensure directory exists
- if (!fs.existsSync(appPath)) {
- fs.mkdirSync(appPath, { recursive: true });
+ await fs.ensureDir(appPath);
+ await fs.writeJson(destination, { config: getClientConfig(handoff) }, { spaces: 2 });
+};
+
+/**
+ * Watches the working public directory for changes and updates the app.
+ *
+ * @param handoff - The Handoff instance
+ * @param wss - The WebSocket broadcaster
+ * @param state - The shared watcher state
+ * @param chokidarConfig - Configuration for chokidar
+ */
+const watchPublicDirectory = (handoff: Handoff, wss: (msg: string) => void, state: WatcherState, chokidarConfig: chokidar.WatchOptions) => {
+ if (fs.existsSync(path.resolve(handoff.workingPath, 'public'))) {
+ chokidar.watch(path.resolve(handoff.workingPath, 'public'), chokidarConfig).on('all', async (event, path) => {
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ if (!state.debounce) {
+ state.debounce = true;
+ try {
+ Logger.warn('Public directory changed. Handoff will ingest the new data...');
+ await syncPublicFiles(handoff);
+ wss(JSON.stringify({ type: 'reload' }));
+ } catch (e) {
+ Logger.error('Error syncing public directory:', e);
+ } finally {
+ state.debounce = false;
+ }
+ }
+ break;
+ }
+ });
+ }
+};
+
+/**
+ * Watches the application source code for changes.
+ *
+ * @param handoff - The Handoff instance
+ */
+const watchAppSource = (handoff: Handoff) => {
+ chokidar
+ .watch(path.resolve(handoff.modulePath, 'src', 'app'), {
+ ignored: /(^|[\/\\])\../, // ignore dotfiles
+ persistent: true,
+ ignoreInitial: true,
+ })
+ .on('all', async (event, path) => {
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ try {
+ await initializeProjectApp(handoff);
+ } catch (e) {
+ Logger.error('Error initializing project app:', e);
+ }
+ break;
+ }
+ });
+};
+
+/**
+ * Watches the user's pages directory for changes.
+ *
+ * @param handoff - The Handoff instance
+ * @param chokidarConfig - Configuration for chokidar
+ */
+const watchPages = (handoff: Handoff, chokidarConfig: chokidar.WatchOptions) => {
+ if (fs.existsSync(path.resolve(handoff.workingPath, 'pages'))) {
+ chokidar.watch(path.resolve(handoff.workingPath, 'pages'), chokidarConfig).on('all', async (event, path) => {
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ try {
+ Logger.warn(`Doc page ${event}ed. Please reload browser to see changes...`);
+ Logger.debug(`Path: ${path}`);
+ } catch (e) {
+ Logger.error('Error watching pages:', e);
+ }
+ break;
+ }
+ });
+ }
+};
+
+/**
+ * Watches the SCSS entry point for changes.
+ *
+ * @param handoff - The Handoff instance
+ * @param state - The shared watcher state
+ * @param chokidarConfig - Configuration for chokidar
+ */
+const watchScss = async (handoff: Handoff, state: WatcherState, chokidarConfig: chokidar.WatchOptions) => {
+ if (handoff.runtimeConfig?.entries?.scss && fs.existsSync(handoff.runtimeConfig?.entries?.scss)) {
+ const stat = await fs.stat(handoff.runtimeConfig.entries.scss);
+ chokidar
+ .watch(stat.isDirectory() ? handoff.runtimeConfig.entries.scss : path.dirname(handoff.runtimeConfig.entries.scss), chokidarConfig)
+ .on('all', async (event, file) => {
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ if (!state.debounce) {
+ state.debounce = true;
+ try {
+ await handoff.getSharedStyles();
+ } catch (e) {
+ Logger.error('Error processing shared styles:', e);
+ } finally {
+ state.debounce = false;
+ }
+ }
+ }
+ });
+ }
+};
+
+/**
+ * Maps configuration entry types to component segments.
+ */
+const mapEntryTypeToSegment = (type: keyof ComponentListObject['entries']): ComponentSegment | undefined => {
+ return {
+ js: ComponentSegment.JavaScript,
+ scss: ComponentSegment.Style,
+ template: ComponentSegment.Previews,
+ templates: ComponentSegment.Previews,
+ }[type];
+};
+
+/**
+ * Gets the paths of runtime components to watch.
+ *
+ * @param handoff - The Handoff instance
+ * @returns A Map of paths to watch and their entry types
+ */
+const getRuntimeComponentsPathsToWatch = (handoff: Handoff) => {
+ const result: Map<string, keyof ComponentListObject['entries']> = new Map();
+
+ for (const runtimeComponentId of Object.keys(handoff.runtimeConfig?.entries.components ?? {})) {
+ for (const runtimeComponentVersion of Object.keys(handoff.runtimeConfig.entries.components[runtimeComponentId])) {
+ const runtimeComponent = handoff.runtimeConfig.entries.components[runtimeComponentId][runtimeComponentVersion];
+ for (const [runtimeComponentEntryType, runtimeComponentEntryPath] of Object.entries(runtimeComponent.entries ?? {})) {
+ const normalizedComponentEntryPath = runtimeComponentEntryPath as string;
+ if (fs.existsSync(normalizedComponentEntryPath)) {
+ const entryType = runtimeComponentEntryType as keyof ComponentListObject['entries'];
+ if (fs.statSync(normalizedComponentEntryPath).isFile()) {
+ result.set(path.resolve(normalizedComponentEntryPath), entryType);
+ } else {
+ result.set(normalizedComponentEntryPath, entryType);
+ }
+ }
+ }
+ }
+ }
+
+ return result;
+};
+
+/**
+ * Watches runtime components for changes.
+ *
+ * @param handoff - The Handoff instance
+ * @param state - The shared watcher state
+ * @param runtimeComponentPathsToWatch - Map of paths to watch
+ */
+const watchRuntimeComponents = (
+ handoff: Handoff,
+ state: WatcherState,
+ runtimeComponentPathsToWatch: Map<string, keyof ComponentListObject['entries']>
+) => {
+ if (state.runtimeComponentsWatcher) {
+ state.runtimeComponentsWatcher.close();
+ }
+
+ if (runtimeComponentPathsToWatch.size > 0) {
+ const pathsToWatch = Array.from(runtimeComponentPathsToWatch.keys());
+ state.runtimeComponentsWatcher = chokidar.watch(pathsToWatch, { ignoreInitial: true });
+ state.runtimeComponentsWatcher.on('all', async (event, file) => {
+ if (handoff.getConfigFilePaths().includes(file)) {
+ return;
+ }
+
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ if (!state.debounce) {
+ state.debounce = true;
+ try {
+ const entryType = runtimeComponentPathsToWatch.get(file);
+ const segmentToUpdate: ComponentSegment | undefined = entryType ? mapEntryTypeToSegment(entryType) : undefined;
+
+ const componentDir = path.basename(path.dirname(path.dirname(file)));
+ await processComponents(handoff, componentDir, segmentToUpdate);
+ } catch (e) {
+ Logger.error('Error processing component:', e);
+ } finally {
+ state.debounce = false;
+ }
+ }
+ break;
+ }
+ });
+ }
+};
+
+/**
+ * Watches the runtime configuration for changes.
+ *
+ * @param handoff - The Handoff instance
+ * @param state - The shared watcher state
+ */
+const watchRuntimeConfiguration = (handoff: Handoff, state: WatcherState) => {
+ if (state.runtimeConfigurationWatcher) {
+ state.runtimeConfigurationWatcher.close();
+ }
+
+ if (handoff.getConfigFilePaths().length > 0) {
+ state.runtimeConfigurationWatcher = chokidar.watch(handoff.getConfigFilePaths(), { ignoreInitial: true });
+ state.runtimeConfigurationWatcher.on('all', async (event, file) => {
+ switch (event) {
+ case 'add':
+ case 'change':
+ case 'unlink':
+ if (!state.debounce) {
+ state.debounce = true;
+ try {
+ file = path.dirname(path.dirname(file));
+ // Reload the Handoff instance to pick up configuration changes
+ handoff.reload();
+ // After reloading, persist the updated client configuration
+ await persistClientConfig(handoff);
+ // Restart the runtime components watcher to track potentially updated/added/removed components
+ watchRuntimeComponents(handoff, state, getRuntimeComponentsPathsToWatch(handoff));
+ // Process components based on the updated configuration and file path
+ await processComponents(handoff, path.basename(file));
+ } catch (e) {
+ Logger.error('Error reloading runtime configuration:', e);
+ } finally {
+ state.debounce = false;
+ }
+ }
+ break;
+ }
+ });
}
- fs.writeFileSync(destination, JSON.stringify({ config: getClientConfig(handoff), ...handoff.runtimeConfig }, null, 2), 'utf-8');
};
/**
@@ -215,15 +483,15 @@ const persistRuntimeCache = (handoff: Handoff) => {
*/
const buildApp = async (handoff: Handoff): Promise<void> => {
// Perform cleanup
- await performCleanup(handoff);
+ await cleanupAppDirectory(handoff);
// Build components
await buildComponents(handoff);
// Prepare app
- const appPath = await prepareProjectApp(handoff);
+ const appPath = await initializeProjectApp(handoff);
- persistRuntimeCache(handoff);
+ await persistClientConfig(handoff);
// Build app
const buildResult = spawn.sync('npx', ['next', 'build'], {
@@ -245,45 +513,36 @@ const buildApp = async (handoff: Handoff): Promise => {
// Ensure output root directory exists
const outputRoot = path.resolve(handoff.workingPath, handoff.sitesDirectory);
- if (!fs.existsSync(outputRoot)) {
- fs.mkdirSync(outputRoot, { recursive: true });
- }
+ await fs.ensureDir(outputRoot);
// Clean the project output directory (if exists)
const output = path.resolve(outputRoot, handoff.getProjectId());
if (fs.existsSync(output)) {
- fs.removeSync(output);
+ await fs.remove(output);
}
// Copy the build files into the project output directory
- fs.copySync(path.resolve(appPath, 'out'), output);
+ await fs.copy(path.resolve(appPath, 'out'), output);
};
/**
- * Watch the next js application
+ * Watch the Next.js application.
+ * Starts a custom dev server with Handoff-specific watchers and hot-reloading.
+ *
* @param handoff
*/
export const watchApp = async (handoff: Handoff): Promise<void> => {
- // Initial processing of the components
- await processComponents(handoff);
+ // Initial processing of the components with caching enabled
+ // This will skip rebuilding components whose source files haven't changed
+ await processComponents(handoff, undefined, undefined, { useCache: true });
- const appPath = await prepareProjectApp(handoff);
- // Include any changes made within the app source during watch
- chokidar
- .watch(path.resolve(handoff.modulePath, 'src', 'app'), {
- ignored: /(^|[\/\\])\../, // ignore dotfiles
- persistent: true,
- ignoreInitial: true,
- })
- .on('all', async (event, path) => {
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- await prepareProjectApp(handoff);
- break;
- }
- });
+ const appPath = await initializeProjectApp(handoff);
+
+ // Persist client configuration
+ await persistClientConfig(handoff);
+
+ // Watch app source
+ watchAppSource(handoff);
// // does a ts config exist?
// let tsconfigPath = 'tsconfig.json';
@@ -309,7 +568,7 @@ export const watchApp = async (handoff: Handoff): Promise => {
// purge out cache
const moduleOutput = path.resolve(appPath, 'out');
if (fs.existsSync(moduleOutput)) {
- fs.removeSync(moduleOutput);
+ await fs.remove(moduleOutput);
}
app.prepare().then(() => {
createServer(async (req, res) => {
@@ -322,17 +581,17 @@ export const watchApp = async (handoff: Handoff): Promise => {
await handle(req, res, parsedUrl);
} catch (err) {
- console.error('Error occurred handling', req.url, err);
+ Logger.error(`Error occurred handling ${req.url}`, err);
res.statusCode = 500;
res.end('internal server error');
}
})
.once('error', (err: string) => {
- console.error(err);
+ Logger.error(err);
process.exit(1);
})
.listen(port, () => {
- console.log(`> Ready on http://${hostname}:${port}`);
+ Logger.log(`Ready on http://${hostname}:${port}`);
});
});
@@ -343,172 +602,38 @@ export const watchApp = async (handoff: Handoff): Promise => {
persistent: true,
ignoreInitial: true,
};
- let debounce = false;
- if (fs.existsSync(path.resolve(handoff.workingPath, 'public'))) {
- chokidar.watch(path.resolve(handoff.workingPath, 'public'), chokidarConfig).on('all', async (event, path) => {
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- if (!debounce) {
- debounce = true;
- console.log(chalk.yellow('Public directory changed. Handoff will ingest the new data...'));
- await mergePublicDir(handoff);
- wss(JSON.stringify({ type: 'reload' }));
- debounce = false;
- }
- break;
- }
- });
- }
-
- let runtimeComponentsWatcher: chokidar.FSWatcher | null = null;
- let runtimeConfigurationWatcher: chokidar.FSWatcher | null = null;
-
- const entryTypeToSegment = (type: keyof ComponentListObject['entries']): ComponentSegment | undefined => {
- return {
- js: ComponentSegment.JavaScript,
- scss: ComponentSegment.Style,
- template: ComponentSegment.Previews,
- templates: ComponentSegment.Previews,
- }[type];
- };
-
- const watchRuntimeComponents = (runtimeComponentPathsToWatch: Map<string, keyof ComponentListObject['entries']>) => {
- persistRuntimeCache(handoff);
-
- if (runtimeComponentsWatcher) {
- runtimeComponentsWatcher.close();
- }
-
- if (runtimeComponentPathsToWatch.size > 0) {
- const pathsToWatch = Array.from(runtimeComponentPathsToWatch.keys());
- runtimeComponentsWatcher = chokidar.watch(pathsToWatch, { ignoreInitial: true });
- runtimeComponentsWatcher.on('all', async (event, file) => {
- if (handoff.getConfigFilePaths().includes(file)) {
- return;
- }
-
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- if (!debounce) {
- debounce = true;
- const entryType = runtimeComponentPathsToWatch.get(file);
- const segmentToUpdate: ComponentSegment = entryType ? entryTypeToSegment(entryType) : undefined;
-
- const componentDir = path.basename(path.dirname(path.dirname(file)));
- await processComponents(handoff, componentDir, segmentToUpdate);
- debounce = false;
- }
- break;
- }
- });
- }
- };
-
- const watchRuntimeConfiguration = () => {
- if (runtimeConfigurationWatcher) {
- runtimeConfigurationWatcher.close();
- }
- if (handoff.getConfigFilePaths().length > 0) {
- runtimeConfigurationWatcher = chokidar.watch(handoff.getConfigFilePaths(), { ignoreInitial: true });
- runtimeConfigurationWatcher.on('all', async (event, file) => {
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- if (!debounce) {
- debounce = true;
- file = path.dirname(path.dirname(file));
- handoff.reload();
- watchRuntimeComponents(getRuntimeComponentsPathsToWatch());
- await processComponents(handoff, path.basename(file));
- debounce = false;
- }
- break;
- }
- });
- }
+ const state: WatcherState = {
+ debounce: false,
+ runtimeComponentsWatcher: null,
+ runtimeConfigurationWatcher: null,
};
- const getRuntimeComponentsPathsToWatch = () => {
- const result: Map<string, keyof ComponentListObject['entries']> = new Map();
-
- for (const runtimeComponentId of Object.keys(handoff.runtimeConfig?.entries.components ?? {})) {
- for (const runtimeComponentVersion of Object.keys(handoff.runtimeConfig.entries.components[runtimeComponentId])) {
- const runtimeComponent = handoff.runtimeConfig.entries.components[runtimeComponentId][runtimeComponentVersion];
- for (const [runtimeComponentEntryType, runtimeComponentEntryPath] of Object.entries(runtimeComponent.entries ?? {})) {
- const normalizedComponentEntryPath = runtimeComponentEntryPath as string;
- if (fs.existsSync(normalizedComponentEntryPath)) {
- const entryType = runtimeComponentEntryType as keyof ComponentListObject['entries'];
- if (fs.statSync(normalizedComponentEntryPath).isFile()) {
- result.set(path.resolve(normalizedComponentEntryPath), entryType);
- } else {
- result.set(normalizedComponentEntryPath, entryType);
- }
- }
- }
- }
- }
-
- return result;
- };
-
- watchRuntimeComponents(getRuntimeComponentsPathsToWatch());
- watchRuntimeConfiguration();
-
- if (handoff.runtimeConfig?.entries?.scss && fs.existsSync(handoff.runtimeConfig?.entries?.scss)) {
- const stat = await fs.stat(handoff.runtimeConfig.entries.scss);
- chokidar
- .watch(
- stat.isDirectory() ? handoff.runtimeConfig.entries.scss : path.dirname(handoff.runtimeConfig.entries.scss),
- chokidarConfig
- )
- .on('all', async (event, file) => {
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- if (!debounce) {
- debounce = true;
- await handoff.getSharedStyles();
- debounce = false;
- }
- }
- });
- }
-
- if (fs.existsSync(path.resolve(handoff.workingPath, 'pages'))) {
- chokidar.watch(path.resolve(handoff.workingPath, 'pages'), chokidarConfig).on('all', async (event, path) => {
- switch (event) {
- case 'add':
- case 'change':
- case 'unlink':
- console.log(chalk.yellow(`Doc page ${event}ed. Please reload browser to see changes...`), path);
- break;
- }
- });
- }
+ watchPublicDirectory(handoff, wss, state, chokidarConfig);
+ watchRuntimeComponents(handoff, state, getRuntimeComponentsPathsToWatch(handoff));
+ watchRuntimeConfiguration(handoff, state);
+ await watchScss(handoff, state, chokidarConfig);
+ watchPages(handoff, chokidarConfig);
};
/**
- * Watch the next js application
+ * Watch the Next.js application using the standard Next.js dev server.
+ * This is useful for debugging the Next.js app itself without the Handoff overlay.
+ *
* @param handoff
*/
export const devApp = async (handoff: Handoff): Promise<void> => {
// Prepare app
- const appPath = await prepareProjectApp(handoff);
+ const appPath = await initializeProjectApp(handoff);
// Purge app cache
const moduleOutput = path.resolve(appPath, 'out');
if (fs.existsSync(moduleOutput)) {
- fs.removeSync(moduleOutput);
+ await fs.remove(moduleOutput);
}
- persistRuntimeCache(handoff);
+ // Persist client configuration
+ await persistClientConfig(handoff);
// Run
const devResult = spawn.sync('npx', ['next', 'dev', '--port', String(handoff.config.app.ports?.app ?? 3000)], {
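
The extracted watch* helpers above all share one WatcherState, so a rebuild triggered by one watcher suppresses overlapping rebuilds from the others, and the try/finally guarantees the guard is released even when a rebuild throws. A condensed sketch of that pattern in isolation (names and the rebuild callback are illustrative, not the PR's actual helper):

    import { Logger } from './utils/logger';

    interface DebounceState {
      debounce: boolean;
    }

    const guardedRebuild = async (state: DebounceState, rebuild: () => Promise<void>) => {
      if (state.debounce) return;   // another watcher is already mid-rebuild
      state.debounce = true;
      try {
        await rebuild();
      } catch (e) {
        Logger.error('Rebuild failed:', e);
      } finally {
        state.debounce = false;     // always release the guard, even on failure
      }
    };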
diff --git a/src/app/components/util/index.ts b/src/app/components/util/index.ts
index 6ac6f9f5..98ccdcb0 100644
--- a/src/app/components/util/index.ts
+++ b/src/app/components/util/index.ts
@@ -8,7 +8,6 @@ import { Types as CoreTypes } from 'handoff-core';
import { groupBy, startCase, uniq } from 'lodash';
import path from 'path';
import { ParsedUrlQuery } from 'querystring';
-import semver from 'semver';
import { SubPageType } from '../../pages/[level1]/[level2]';
// Get the parsed url string type
@@ -463,7 +462,7 @@ export const fetchComponents = (options?: FetchComponentsOptions) => {
// Include components from components.json API if requested
if (includeApi) {
- const compontnsFileExists = fs.existsSync(
+ const componentsFileExists = fs.existsSync(
path.resolve(
process.env.HANDOFF_MODULE_PATH ?? '',
'.handoff',
@@ -474,38 +473,29 @@ export const fetchComponents = (options?: FetchComponentsOptions) => {
)
);
- const componentIds = compontnsFileExists
- ? Array.from(
- new Set(
- (
- JSON.parse(
- fs.readFileSync(
- path.resolve(
- process.env.HANDOFF_MODULE_PATH ?? '',
- '.handoff',
- `${process.env.HANDOFF_PROJECT_ID}`,
- 'public',
- 'api',
- 'components.json'
- ),
- 'utf-8'
- )
- ) as ComponentListObject[]
- ).map((c) => c.id)
- )
+ if (componentsFileExists) {
+ const componentList = JSON.parse(
+ fs.readFileSync(
+ path.resolve(
+ process.env.HANDOFF_MODULE_PATH ?? '',
+ '.handoff',
+ `${process.env.HANDOFF_PROJECT_ID}`,
+ 'public',
+ 'api',
+ 'components.json'
+ ),
+ 'utf-8'
)
- : [];
-
- for (const componentId of componentIds) {
- const metadata = getLatestComponentMetadata(componentId);
- if (metadata) {
- components[componentId] = {
- type: metadata.type as ComponentType,
- group: metadata.group || '',
- description: metadata.description || '',
- name: metadata.title || '',
+ ) as ComponentListObject[];
+
+ componentList.forEach((component) => {
+ components[component.id] = {
+ type: (component.type as ComponentType) || ComponentType.Element,
+ group: component.group || '',
+ description: component.description || '',
+ name: component.title || '',
};
- }
+ });
}
}
@@ -525,72 +515,41 @@ export const fetchComponents = (options?: FetchComponentsOptions) => {
}
};
-type RuntimeCache = RuntimeConfig & { config: ClientConfig };
+type ClientConfigCache = { config: ClientConfig };
-let cachedRuntimeCache: RuntimeCache | null = null;
+let cachedClientConfig: ClientConfigCache | null = null;
-const getDefaultRuntimeCache = (): RuntimeCache => {
+const getDefaultClientConfig = (): ClientConfigCache => {
return {
config: {} as ClientConfig,
- entries: {
- scss: undefined,
- js: undefined,
- components: {},
- },
- options: {},
- } as RuntimeCache;
+ };
};
-const loadRuntimeCache = (): RuntimeCache => {
- if (cachedRuntimeCache) {
- return cachedRuntimeCache;
+const loadClientConfig = (): ClientConfigCache => {
+ if (cachedClientConfig) {
+ return cachedClientConfig;
}
const modulePath = process.env.HANDOFF_MODULE_PATH ?? '';
const projectId = process.env.HANDOFF_PROJECT_ID ?? '';
- const runtimeCachePath = path.resolve(modulePath, '.handoff', projectId, 'runtime.cache.json');
+ const clientConfigPath = path.resolve(modulePath, '.handoff', projectId, 'client.config.json');
- if (!fs.existsSync(runtimeCachePath)) {
+ if (!fs.existsSync(clientConfigPath)) {
// Return empty default instead of throwing to support running without fetch
- return getDefaultRuntimeCache();
+ return getDefaultClientConfig();
}
try {
- const cacheContent = fs.readFileSync(runtimeCachePath, 'utf-8');
- cachedRuntimeCache = JSON.parse(cacheContent) as RuntimeCache;
- return cachedRuntimeCache;
+ const cacheContent = fs.readFileSync(clientConfigPath, 'utf-8');
+ cachedClientConfig = JSON.parse(cacheContent) as ClientConfigCache;
+ return cachedClientConfig;
} catch (e) {
// Return empty default on error instead of throwing
- return getDefaultRuntimeCache();
+ return getDefaultClientConfig();
}
};
-export const getLatestComponentMetadata = (id: string) => {
- const runtimeCache = loadRuntimeCache();
-
- const components = runtimeCache.entries?.components;
-
- if (!components || !components[id]) {
- return false;
- }
-
- const versions = Object.keys(components[id]);
- if (!versions.length) {
- return false;
- }
-
- // Use natural version sorting (optional improvement below!)
- const latestVersion = semver.rsort(versions).shift();
-
- if (!latestVersion) {
- return false;
- }
-
- const latestComponent = components[id][latestVersion];
-
- return latestComponent || false;
-};
/**
* Fetch Component Doc Page Markdown
@@ -612,8 +571,8 @@ export const fetchFoundationDocPageMarkdown = (path: string, slug: string | unde
};
export const getClientRuntimeConfig = (): ClientConfig => {
- const runtimeCache = loadRuntimeCache();
- return runtimeCache.config;
+ const clientConfig = loadClientConfig();
+ return clientConfig.config;
};
export const getTokens = (): CoreTypes.IDocumentationObject => {
diff --git a/src/cache/build-cache.ts b/src/cache/build-cache.ts
new file mode 100644
index 00000000..40e1cdbb
--- /dev/null
+++ b/src/cache/build-cache.ts
@@ -0,0 +1,342 @@
+import fs from 'fs-extra';
+import path from 'path';
+import Handoff from '../index';
+import { Logger } from '../utils/logger';
+import { computeDirectoryState, computeFileState, directoryStatesMatch, FileState, statesMatch } from './file-state';
+
+/** Current cache format version - bump when structure changes */
+const CACHE_VERSION = '1.0.0';
+
+/**
+ * Cache entry for a single component version
+ */
+export interface ComponentCacheEntry {
+ /** File states for all source files of this component */
+ files: Record<string, FileState>;
+ /** States for template directory files (if templates is a directory) */
+ templateDirFiles?: Record<string, FileState>;
+ /** Timestamp when this component was last built */
+ buildTimestamp: number;
+}
+
+/**
+ * State of global dependencies that affect all components
+ */
+export interface GlobalDepsState {
+ /** tokens.json file state */
+ tokens?: FileState;
+ /** shared.scss or shared.css file state */
+ sharedStyles?: FileState;
+ /** Global SCSS entry file state */
+ globalScss?: FileState;
+ /** Global JS entry file state */
+ globalJs?: FileState;
+}
+
+/**
+ * Complete build cache structure
+ */
+export interface BuildCache {
+ /** Cache format version for invalidation on structure changes */
+ version: string;
+ /** State of global dependencies at last build */
+ globalDeps: GlobalDepsState;
+ /** Per-component cache entries: componentId -> version -> entry */
+ components: Record<string, Record<string, ComponentCacheEntry>>;
+}
+
+/**
+ * Gets the path to the build cache file
+ */
+export function getCachePath(handoff: Handoff): string {
+ return path.resolve(handoff.modulePath, '.handoff', handoff.getProjectId(), '.cache', 'build-cache.json');
+}
+
+/**
+ * Loads the build cache from disk
+ * @returns The cached data or null if cache doesn't exist or is invalid
+ */
+export async function loadBuildCache(handoff: Handoff): Promise<BuildCache | null> {
+ const cachePath = getCachePath(handoff);
+
+ try {
+ if (!(await fs.pathExists(cachePath))) {
+ Logger.debug('No existing build cache found');
+ return null;
+ }
+
+ const data = await fs.readJson(cachePath);
+
+ // Validate cache version
+ if (data.version !== CACHE_VERSION) {
+ Logger.debug(`Build cache version mismatch (${data.version} vs ${CACHE_VERSION}), invalidating`);
+ return null;
+ }
+
+ return data as BuildCache;
+ } catch (error) {
+ Logger.debug('Failed to load build cache, will rebuild all components:', error);
+ return null;
+ }
+}
+
+/**
+ * Saves the build cache to disk
+ * Uses atomic write (temp file + rename) to prevent corruption
+ */
+export async function saveBuildCache(handoff: Handoff, cache: BuildCache): Promise<void> {
+ const cachePath = getCachePath(handoff);
+ const cacheDir = path.dirname(cachePath);
+ const tempPath = `${cachePath}.tmp`;
+
+ try {
+ await fs.ensureDir(cacheDir);
+ await fs.writeJson(tempPath, cache, { spaces: 2 });
+ await fs.rename(tempPath, cachePath);
+ Logger.debug('Build cache saved');
+ } catch (error) {
+ Logger.debug('Failed to save build cache:', error);
+ // Clean up temp file if it exists
+ try {
+ await fs.remove(tempPath);
+ } catch {
+ // Ignore cleanup errors
+ }
+ }
+}
+
+/**
+ * Computes the current state of global dependencies
+ */
+export async function computeGlobalDepsState(handoff: Handoff): Promise<GlobalDepsState> {
+ const result: GlobalDepsState = {};
+
+ // tokens.json
+ const tokensPath = handoff.getTokensFilePath();
+ result.tokens = (await computeFileState(tokensPath)) ?? undefined;
+
+ // shared.scss or shared.css
+ const sharedScssPath = path.resolve(handoff.workingPath, 'integration/components/shared.scss');
+ const sharedCssPath = path.resolve(handoff.workingPath, 'integration/components/shared.css');
+
+ const sharedScssState = await computeFileState(sharedScssPath);
+ const sharedCssState = await computeFileState(sharedCssPath);
+ result.sharedStyles = sharedScssState ?? sharedCssState ?? undefined;
+
+ // Global SCSS entry
+ if (handoff.runtimeConfig?.entries?.scss) {
+ result.globalScss = (await computeFileState(handoff.runtimeConfig.entries.scss)) ?? undefined;
+ }
+
+ // Global JS entry
+ if (handoff.runtimeConfig?.entries?.js) {
+ result.globalJs = (await computeFileState(handoff.runtimeConfig.entries.js)) ?? undefined;
+ }
+
+ return result;
+}
+
+/**
+ * Checks if global dependencies have changed
+ */
+export function haveGlobalDepsChanged(cached: GlobalDepsState | null | undefined, current: GlobalDepsState): boolean {
+ if (!cached) return true;
+
+ // Check each global dependency
+ if (!statesMatch(cached.tokens, current.tokens)) {
+ Logger.debug('Global dependency changed: tokens.json');
+ return true;
+ }
+
+ if (!statesMatch(cached.sharedStyles, current.sharedStyles)) {
+ Logger.debug('Global dependency changed: shared styles');
+ return true;
+ }
+
+ if (!statesMatch(cached.globalScss, current.globalScss)) {
+ Logger.debug('Global dependency changed: global SCSS entry');
+ return true;
+ }
+
+ if (!statesMatch(cached.globalJs, current.globalJs)) {
+ Logger.debug('Global dependency changed: global JS entry');
+ return true;
+ }
+
+ return false;
+}
+
+/**
+ * Gets all file paths that should be tracked for a component
+ */
+export function getComponentFilePaths(handoff: Handoff, componentId: string, version: string): { files: string[]; templateDir?: string } {
+ const runtimeComponent = handoff.runtimeConfig?.entries?.components?.[componentId]?.[version];
+ if (!runtimeComponent) {
+ return { files: [] };
+ }
+
+ const files: string[] = [];
+ let templateDir: string | undefined;
+
+ // Find the config file path for this component
+ const configPaths = handoff.getConfigFilePaths();
+ for (const configPath of configPaths) {
+ // Check if this config path belongs to this component/version
+ if (configPath.includes(componentId) && configPath.includes(version)) {
+ files.push(configPath);
+ break;
+ }
+ }
+
+ // Add entry files
+ const entries = runtimeComponent.entries as Record<string, string> | undefined;
+ if (entries) {
+ if (entries.js) {
+ files.push(entries.js);
+ }
+ if (entries.scss) {
+ files.push(entries.scss);
+ }
+ // Handle both 'template' (singular) and 'templates' (plural) entry types
+ const templatePath = entries.template || entries.templates;
+ if (templatePath) {
+ try {
+ const stat = fs.statSync(templatePath);
+ if (stat.isDirectory()) {
+ templateDir = templatePath;
+ } else {
+ files.push(templatePath);
+ }
+ } catch {
+ // File doesn't exist, still add to track
+ files.push(templatePath);
+ }
+ }
+ }
+
+ return { files, templateDir };
+}
+
+/**
+ * Computes current file states for a component
+ */
+export async function computeComponentFileStates(
+ handoff: Handoff,
+ componentId: string,
+ version: string
+): Promise<{ files: Record<string, FileState>; templateDirFiles?: Record<string, FileState> }> {
+ const { files: filePaths, templateDir } = getComponentFilePaths(handoff, componentId, version);
+
+ const files: Record<string, FileState> = {};
+
+ for (const filePath of filePaths) {
+ const state = await computeFileState(filePath);
+ if (state) {
+ files[filePath] = state;
+ }
+ }
+
+ let templateDirFiles: Record<string, FileState> | undefined;
+ if (templateDir) {
+ templateDirFiles = await computeDirectoryState(templateDir, ['.hbs', '.html']);
+ }
+
+ return { files, templateDirFiles };
+}
+
+/**
+ * Checks if a component needs to be rebuilt based on file states
+ */
+export function hasComponentChanged(
+ cached: ComponentCacheEntry | null | undefined,
+ current: { files: Record<string, FileState>; templateDirFiles?: Record<string, FileState> }
+): boolean {
+ if (!cached) {
+ return true; // No cache entry means new component
+ }
+
+ // Check regular files
+ const cachedFiles = Object.keys(cached.files);
+ const currentFiles = Object.keys(current.files);
+
+ // Check if file count changed
+ if (cachedFiles.length !== currentFiles.length) {
+ return true;
+ }
+
+ // Check if any files were added or removed
+ const cachedSet = new Set(cachedFiles);
+ for (const file of currentFiles) {
+ if (!cachedSet.has(file)) {
+ return true;
+ }
+ }
+
+ // Check if any file states changed
+ for (const file of cachedFiles) {
+ if (!statesMatch(cached.files[file], current.files[file])) {
+ return true;
+ }
+ }
+
+ // Check template directory files if applicable
+ if (!directoryStatesMatch(cached.templateDirFiles, current.templateDirFiles)) {
+ return true;
+ }
+
+ return false;
+}
+
+/**
+ * Checks if the component output files exist
+ */
+export async function checkOutputExists(handoff: Handoff, componentId: string, version: string): Promise<boolean> {
+ const outputPath = path.resolve(handoff.workingPath, 'public/api/component', componentId, `${version}.json`);
+ return fs.pathExists(outputPath);
+}
+
+/**
+ * Creates an empty cache structure
+ */
+export function createEmptyCache(): BuildCache {
+ return {
+ version: CACHE_VERSION,
+ globalDeps: {},
+ components: {},
+ };
+}
+
+/**
+ * Updates cache entry for a specific component version
+ */
+export function updateComponentCacheEntry(
+ cache: BuildCache,
+ componentId: string,
+ version: string,
+ fileStates: { files: Record<string, FileState>; templateDirFiles?: Record<string, FileState> }
+): void {
+ if (!cache.components[componentId]) {
+ cache.components[componentId] = {};
+ }
+
+ cache.components[componentId][version] = {
+ files: fileStates.files,
+ templateDirFiles: fileStates.templateDirFiles,
+ buildTimestamp: Date.now(),
+ };
+}
+
+/**
+ * Removes components from cache that are no longer in runtime config
+ */
+export function pruneRemovedComponents(cache: BuildCache, currentComponentIds: string[]): void {
+ const currentSet = new Set(currentComponentIds);
+ const cachedIds = Object.keys(cache.components);
+
+ for (const cachedId of cachedIds) {
+ if (!currentSet.has(cachedId)) {
+ Logger.debug(`Pruning removed component from cache: ${cachedId}`);
+ delete cache.components[cachedId];
+ }
+ }
+}
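
Taken together, the exports above support a skip-or-rebuild decision per component version: global dependencies are checked first, then the component's own file states; after a successful rebuild the fresh states are recorded and persisted. A sketch of that flow, assuming it is driven from build code living next to src/cache (the import path and function names are illustrative, not the builder's actual code):

    import Handoff from '../index';
    import {
      computeComponentFileStates,
      computeGlobalDepsState,
      createEmptyCache,
      hasComponentChanged,
      haveGlobalDepsChanged,
      loadBuildCache,
      saveBuildCache,
      updateComponentCacheEntry,
    } from '../cache';

    const needsRebuild = async (handoff: Handoff, componentId: string, version: string): Promise<boolean> => {
      const cache = (await loadBuildCache(handoff)) ?? createEmptyCache();
      // Any change to tokens.json, shared styles, or the global SCSS/JS entries invalidates every component.
      const currentGlobalDeps = await computeGlobalDepsState(handoff);
      if (haveGlobalDepsChanged(cache.globalDeps, currentGlobalDeps)) return true;
      // Otherwise compare this version's config, entry files, and template directory against the cache.
      const fileStates = await computeComponentFileStates(handoff, componentId, version);
      return hasComponentChanged(cache.components[componentId]?.[version], fileStates);
    };

    const recordSuccessfulBuild = async (handoff: Handoff, componentId: string, version: string): Promise<void> => {
      const cache = (await loadBuildCache(handoff)) ?? createEmptyCache();
      const fileStates = await computeComponentFileStates(handoff, componentId, version);
      updateComponentCacheEntry(cache, componentId, version, fileStates);
      cache.globalDeps = await computeGlobalDepsState(handoff);
      await saveBuildCache(handoff, cache); // atomic write: temp file + rename
    };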
diff --git a/src/cache/file-state.ts b/src/cache/file-state.ts
new file mode 100644
index 00000000..3d54fa10
--- /dev/null
+++ b/src/cache/file-state.ts
@@ -0,0 +1,131 @@
+import fs from 'fs-extra';
+import path from 'path';
+
+/**
+ * Represents the state of a file for change detection
+ */
+export interface FileState {
+ /** File modification time in milliseconds */
+ mtime: number;
+ /** File size in bytes */
+ size: number;
+}
+
+/**
+ * Computes the current state (mtime, size) of a file
+ * @param filePath - Absolute path to the file
+ * @returns FileState if file exists, null otherwise
+ */
+export async function computeFileState(filePath: string): Promise<FileState | null> {
+ try {
+ const stats = await fs.stat(filePath);
+ if (!stats.isFile()) {
+ return null;
+ }
+ return {
+ mtime: stats.mtimeMs,
+ size: stats.size,
+ };
+ } catch {
+ return null;
+ }
+}
+
+/**
+ * Computes file states for all files in a directory (recursively)
+ * @param dirPath - Absolute path to the directory
+ * @param extensions - Optional array of file extensions to include (e.g., ['.hbs', '.html'])
+ * @returns Record mapping relative file paths to their states
+ */
+export async function computeDirectoryState(dirPath: string, extensions?: string[]): Promise<Record<string, FileState>> {
+ const result: Record<string, FileState> = {};
+
+ try {
+ const stats = await fs.stat(dirPath);
+ if (!stats.isDirectory()) {
+ return result;
+ }
+
+ const entries = await fs.readdir(dirPath, { withFileTypes: true });
+
+ for (const entry of entries) {
+ const fullPath = path.join(dirPath, entry.name);
+
+ if (entry.isDirectory()) {
+ // Recursively process subdirectories
+ const subDirStates = await computeDirectoryState(fullPath, extensions);
+ for (const [subPath, state] of Object.entries(subDirStates)) {
+ result[path.join(entry.name, subPath)] = state;
+ }
+ } else if (entry.isFile()) {
+ // Check extension filter if provided
+ if (extensions && extensions.length > 0) {
+ const ext = path.extname(entry.name).toLowerCase();
+ if (!extensions.includes(ext)) {
+ continue;
+ }
+ }
+
+ const fileState = await computeFileState(fullPath);
+ if (fileState) {
+ result[entry.name] = fileState;
+ }
+ }
+ }
+ } catch {
+ // Directory doesn't exist or can't be read
+ }
+
+ return result;
+}
+
+/**
+ * Compares two file states for equality
+ * @param a - First file state (can be null/undefined)
+ * @param b - Second file state (can be null/undefined)
+ * @returns true if states match, false otherwise
+ */
+export function statesMatch(a: FileState | null | undefined, b: FileState | null | undefined): boolean {
+ if (!a && !b) return true;
+ if (!a || !b) return false;
+ return a.mtime === b.mtime && a.size === b.size;
+}
+
+/**
+ * Compares two records of file states
+ * @param cached - Previously cached file states
+ * @param current - Current file states
+ * @returns true if all states match, false if any differ or files added/removed
+ */
+export function directoryStatesMatch(
+ cached: Record<string, FileState> | null | undefined,
+ current: Record<string, FileState> | null | undefined
+): boolean {
+ if (!cached && !current) return true;
+ if (!cached || !current) return false;
+
+ const cachedKeys = Object.keys(cached);
+ const currentKeys = Object.keys(current);
+
+ // Check if file count differs
+ if (cachedKeys.length !== currentKeys.length) {
+ return false;
+ }
+
+ // Check if any files were added or removed
+ const cachedSet = new Set(cachedKeys);
+ for (const key of currentKeys) {
+ if (!cachedSet.has(key)) {
+ return false;
+ }
+ }
+
+ // Check if any file states changed
+ for (const key of cachedKeys) {
+ if (!statesMatch(cached[key], current[key])) {
+ return false;
+ }
+ }
+
+ return true;
+}
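
For a single tracked file, change detection with the helpers above reduces to computing a fresh state and comparing it to the cached one. A short sketch, assuming it lives alongside src/cache; the path it checks is illustrative:

    import { computeFileState, statesMatch, type FileState } from './file-state';

    const hasTokensChanged = async (cachedState: FileState | null): Promise<boolean> => {
      const current = await computeFileState('public/api/tokens.json'); // illustrative path
      // If exactly one side is null the file appeared or disappeared; statesMatch reports that as a mismatch.
      return !statesMatch(cachedState, current);
    };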
diff --git a/src/cache/index.ts b/src/cache/index.ts
new file mode 100644
index 00000000..c267c157
--- /dev/null
+++ b/src/cache/index.ts
@@ -0,0 +1,20 @@
+// File state utilities
+export { computeDirectoryState, computeFileState, directoryStatesMatch, statesMatch, type FileState } from './file-state';
+
+// Build cache utilities
+export {
+ checkOutputExists,
+ computeComponentFileStates,
+ computeGlobalDepsState,
+ createEmptyCache,
+ getCachePath,
+ hasComponentChanged,
+ haveGlobalDepsChanged,
+ loadBuildCache,
+ pruneRemovedComponents,
+ saveBuildCache,
+ updateComponentCacheEntry,
+ type BuildCache,
+ type ComponentCacheEntry,
+ type GlobalDepsState,
+} from './build-cache';
diff --git a/src/cli/eject.ts b/src/cli/eject.ts
index ce1a8334..eb451f6c 100644
--- a/src/cli/eject.ts
+++ b/src/cli/eject.ts
@@ -1,7 +1,7 @@
-import chalk from 'chalk';
import fs from 'fs-extra';
import path from 'path';
import Handoff from '../index';
+import { Logger } from '../utils/logger';
/**
* Eject the config to the working directory
@@ -11,13 +11,13 @@ export const ejectConfig = async (handoff: Handoff) => {
const configPath = path.resolve(path.join(handoff.workingPath, 'handoff.config.js'));
if (fs.existsSync(configPath)) {
if (!handoff.force) {
- console.log(chalk.red(`A config already exists in the working directory. Use the --force flag to overwrite.`));
+ Logger.error(`Config file already exists. Use "--force" to overwrite.`);
}
}
// load the template as a string
const template = fs.readFileSync(path.resolve(handoff.modulePath, 'config/config.template.js'), 'utf8');
fs.writeFileSync(configPath, template);
- console.log(chalk.green(`Config ejected to ${configPath}`));
+ Logger.success(`Config ejected to ${configPath}`);
return handoff;
};
@@ -30,15 +30,13 @@ export const ejectPages = async (handoff: Handoff) => {
const workingPath = path.resolve(path.join(handoff.workingPath, 'pages'));
if (fs.existsSync(workingPath)) {
if (!handoff.force) {
- console.log(
- chalk.yellow(`It appears you already have custom pages. Use the --force flag to merge in any pages you haven't customized.`)
- );
+ Logger.warn(`It appears you already have custom pages. Use the --force flag to merge in any pages you haven't customized.`);
return;
}
}
const docsPath = path.resolve(path.join(handoff.modulePath, 'config/docs'));
fs.copySync(docsPath, workingPath, { overwrite: false });
- console.log(chalk.green(`Customizable pages ejected to ${workingPath}`));
+ Logger.success(`Customizable pages ejected to ${workingPath}`);
return handoff;
};
@@ -51,7 +49,7 @@ export const ejectTheme = async (handoff: Handoff) => {
const workingPath = path.resolve(path.join(handoff.workingPath, 'theme', 'default.scss'));
if (fs.existsSync(workingPath)) {
if (!handoff.force) {
- console.log(chalk.yellow(`It appears you already have custom theme. Use the --force flag to replace you haven't customized.`));
+ Logger.warn(`It appears you already have a custom theme. Use the --force flag to replace any theme files you haven't customized.`);
return;
}
}
@@ -61,10 +59,10 @@ export const ejectTheme = async (handoff: Handoff) => {
if (fs.existsSync(docsPath)) {
fs.copySync(docsPath, workingPath, { overwrite: false });
- console.log(chalk.green(`Customizable theme ejected to ${workingPath}`));
+ Logger.success(`Customizable theme ejected to ${workingPath}`);
} else {
fs.copySync(path.resolve(path.join(handoff.modulePath, `src/app/sass/themes/_default.scss`)), workingPath, { overwrite: false });
- console.log(chalk.green(`Customizable theme ejected to ${workingPath}`));
+ Logger.success(`Customizable theme ejected to ${workingPath}`);
}
return handoff;
diff --git a/src/cli/make.ts b/src/cli/make.ts
index fd1d50fb..bc26e615 100644
--- a/src/cli/make.ts
+++ b/src/cli/make.ts
@@ -2,6 +2,7 @@ import chalk from 'chalk';
import fs from 'fs-extra';
import path from 'path';
import Handoff from '../index';
+import { Logger } from '../utils/logger';
import { prompt } from '../utils/prompt';
/**
@@ -10,12 +11,12 @@ import { prompt } from '../utils/prompt';
*/
export const makeTemplate = async (handoff: Handoff, component: string, state: string) => {
if (!handoff?.runtimeConfig?.entries?.templates) {
- console.log(chalk.red(`Runtime config does not specify entry for templates.`));
+ Logger.error(`Runtime config does not specify entry for templates.`);
return;
}
if (!component) {
- console.log(chalk.red(`Template component must be set`));
+ Logger.error(`Template component must be set`);
return;
}
@@ -24,12 +25,12 @@ export const makeTemplate = async (handoff: Handoff, component: string, state: s
}
if (!/^[a-z0-9]+$/i.test(component)) {
- console.log(chalk.red(`Template component must be alphanumeric and may contain dashes or underscores`));
+ Logger.error(`Template component must be alphanumeric and may contain dashes or underscores`);
return;
}
if (!/^[a-z0-9]+$/i.test(state)) {
- console.log(chalk.red(`Template state must be alphanumeric and may contain dashes or underscores`));
+ Logger.error(`Template state must be alphanumeric and may contain dashes or underscores`);
return;
}
@@ -42,14 +43,14 @@ export const makeTemplate = async (handoff: Handoff, component: string, state: s
const target = path.resolve(workingPath, `${state}.html`);
if (fs.existsSync(target)) {
if (!handoff.force) {
- console.log(chalk.yellow(`'${state}' already exists as custom template. Use the --force flag revert it to default.`));
+ Logger.warn(`'${state}' already exists as a custom template. Use the --force flag to revert it to the default.`);
return;
}
}
const templatePath = path.resolve(path.join(handoff.modulePath, 'config/templates', 'template.html'));
const template = fs.readFileSync(templatePath, 'utf8');
fs.writeFileSync(target, template);
- console.log(chalk.green(`New template ${state}.html was created in ${workingPath}`));
+ Logger.success(`New template ${state}.html was created in ${workingPath}`);
return handoff;
};
@@ -60,18 +61,18 @@ export const makeTemplate = async (handoff: Handoff, component: string, state: s
export const makePage = async (handoff: Handoff, name: string, parent: string | undefined) => {
let type = 'md';
if (!name) {
- console.log(chalk.red(`Page name must be set`));
+ Logger.error(`Page name must be set`);
return;
}
if (!/^[a-z0-9]+$/i.test(name)) {
- console.log(chalk.red(`Page name must be alphanumeric and may contain dashes or underscores`));
+ Logger.error(`Page name must be alphanumeric and may contain dashes or underscores`);
return;
}
let workingPath, sourcePath, templatePath;
if (parent) {
if (!/^[a-z0-9]+$/i.test(parent)) {
- console.log(chalk.red(`Parent name must be alphanumeric and may contain dashes or underscores`));
+ Logger.error(`Parent name must be alphanumeric and may contain dashes or underscores`);
return;
}
workingPath = path.resolve(path.join(handoff.workingPath, `pages`, parent));
@@ -88,7 +89,7 @@ export const makePage = async (handoff: Handoff, name: string, parent: string |
const target = path.resolve(workingPath, `${name}.${type}`);
if (fs.existsSync(target)) {
if (!handoff.force) {
- console.log(chalk.yellow(`'${name}' already exists as custom page. Use the --force flag revert it to default.`));
+ Logger.warn(`'${name}' already exists as a custom page. Use the --force flag to revert it to the default.`);
return;
}
}
@@ -99,7 +100,7 @@ export const makePage = async (handoff: Handoff, name: string, parent: string |
}
const template = fs.readFileSync(templatePath, 'utf8');
fs.writeFileSync(target, template);
- console.log(chalk.green(`New template ${name}.${type} was created in ${workingPath}`));
+ Logger.success(`New template ${name}.${type} was created in ${workingPath}`);
return handoff;
};
@@ -109,7 +110,7 @@ export const makePage = async (handoff: Handoff, name: string, parent: string |
*/
export const makeComponent = async (handoff: Handoff, name: string) => {
if (!name) {
- console.log(chalk.red(`Component name must be set`));
+ Logger.error(`Component name must be set`);
return;
}
@@ -124,7 +125,7 @@ export const makeComponent = async (handoff: Handoff, name: string) => {
const targetHtml = path.resolve(workingPath, `${name}.hbs`);
if (fs.existsSync(targetHtml)) {
if (!handoff.force) {
- console.log(chalk.yellow(`'${name}' already exists as custom component.`));
+ Logger.warn(`'${name}' already exists as a custom component. Use the --force flag to overwrite it.`);
return;
}
}
@@ -132,7 +133,7 @@ export const makeComponent = async (handoff: Handoff, name: string) => {
const htmlPath = path.resolve(templatePath, 'template.hbs');
const htmlTemplate = fs.readFileSync(htmlPath, 'utf8');
fs.writeFileSync(targetHtml, htmlTemplate);
- console.log(chalk.green(`New component ${name}.hbs was created in ${workingPath}`));
+ Logger.success(`New component ${name}.hbs was created in ${workingPath}`);
const jsonpath = path.resolve(templatePath, 'template.json');
const jsonTemplate = fs.readFileSync(jsonpath, 'utf8');
@@ -140,7 +141,7 @@ export const makeComponent = async (handoff: Handoff, name: string) => {
const writeJSFile = await prompt(chalk.green(`Would you like us to generate a supporting javascript file ${name}.js? (y/n): `));
if (writeJSFile === 'y') {
- console.log(chalk.green(`Writing ${name}.js.\n`));
+ Logger.success(`Writing ${name}.js.\n`);
const jsPath = path.resolve(templatePath, 'template.js');
const jsTemplate = fs.readFileSync(jsPath, 'utf8');
fs.writeFileSync(path.resolve(workingPath, `${name}.js`), jsTemplate);
@@ -148,7 +149,7 @@ export const makeComponent = async (handoff: Handoff, name: string) => {
const writeSassFile = await prompt(chalk.green(`Would you like us to generate a supporting SASS file ${name}.scss? (y/n): `));
if (writeSassFile === 'y') {
- console.log(chalk.green(`Writing ${name}.scss.\n`));
+ Logger.success(`Writing ${name}.scss.\n`);
const scssPath = path.resolve(templatePath, 'template.scss');
const scssTemplate = fs.readFileSync(scssPath, 'utf8');
fs.writeFileSync(path.resolve(workingPath, `${name}.scss`), scssTemplate);
diff --git a/src/commands/make/component.ts b/src/commands/make/component.ts
index 8137103d..dfde747e 100644
--- a/src/commands/make/component.ts
+++ b/src/commands/make/component.ts
@@ -1,5 +1,6 @@
import { CommandModule } from 'yargs';
import Handoff from '../..';
+import { Logger } from '../../utils/logger';
import { SharedArgs } from '../types';
import { getSharedOptions } from '../utils';
@@ -19,12 +20,10 @@ const command: CommandModule<{}, MakeComponentArgs> = {
handler: async (args: MakeComponentArgs) => {
const handoff = new Handoff(args.debug, args.force);
-
const componentName = args.name;
- const version = args.version;
if (!/^[a-z0-9_-]+$/i.test(componentName)) {
- console.error(`Component name must be alphanumeric and may contain dashes or underscores`);
+ Logger.error(`Component name must be alphanumeric and may contain dashes or underscores`);
return;
}
diff --git a/src/commands/make/page.ts b/src/commands/make/page.ts
index 6bdcd8c1..a5e583db 100644
--- a/src/commands/make/page.ts
+++ b/src/commands/make/page.ts
@@ -1,5 +1,6 @@
import { CommandModule } from 'yargs';
import Handoff from '../../';
+import { Logger } from '../../utils/logger';
import { SharedArgs } from '../types';
import { getSharedOptions } from '../utils';
@@ -28,14 +29,14 @@ const command: CommandModule<{}, MakePageArgs> = {
const pageName = args.name;
if (!/^[a-z0-9]+$/i.test(pageName)) {
- console.error(`Page name must be alphanumeric and may contain dashes or underscores`);
+ Logger.error(`Page name must be alphanumeric and may contain dashes or underscores`);
return;
}
let pageParent = args.parent;
if (pageParent && !/^[a-z0-9]+$/i.test(pageParent)) {
- console.error(`Page parent must be alphanumeric and may contain dashes or underscores`);
+ Logger.error(`Page parent must be alphanumeric and may contain dashes or underscores`);
return;
}
diff --git a/src/commands/make/template.ts b/src/commands/make/template.ts
index 316a5a60..a3684342 100644
--- a/src/commands/make/template.ts
+++ b/src/commands/make/template.ts
@@ -1,5 +1,6 @@
import { CommandModule } from 'yargs';
import Handoff from '../../';
+import { Logger } from '../../utils/logger';
import { SharedArgs } from '../types';
import { getSharedOptions } from '../utils';
@@ -28,14 +29,14 @@ const command: CommandModule<{}, MakeTemplateArgs> = {
const templateComponent = args.component;
if (!/^[a-z0-9]+$/i.test(templateComponent)) {
- console.error(`Template component must be alphanumeric and may contain dashes or underscores`);
+ Logger.error(`Template component must be alphanumeric and may contain dashes or underscores`);
return;
}
let templateState = args.state;
if (templateState && !/^[a-z0-9]+$/i.test(templateState)) {
- console.error(`Template state must be alphanumeric and may contain dashes or underscores`);
+ Logger.error(`Template state must be alphanumeric and may contain dashes or underscores`);
return;
}
diff --git a/src/index.ts b/src/index.ts
index c38801d8..f663a20d 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,4 +1,3 @@
-import chalk from 'chalk';
import 'dotenv/config';
import fs from 'fs-extra';
import { Types as CoreTypes, Handoff as HandoffRunner, Providers } from 'handoff-core';
@@ -13,6 +12,7 @@ import { processSharedStyles } from './transformers/preview/component';
import processComponents, { ComponentSegment } from './transformers/preview/component/builder';
import { ComponentListObject } from './transformers/preview/types';
import { Config, RuntimeConfig } from './types/config';
+import { Logger } from './utils/logger';
import { generateFilesystemSafeId } from './utils/path';
class Handoff {
@@ -45,6 +45,7 @@ class Handoff {
this.config = null;
this.debug = debug ?? false;
this.force = force ?? false;
+ Logger.init({ debug: this.debug });
this.init(config);
global.handoff = this;
}
@@ -134,7 +135,6 @@ class Handoff {
return this;
}
-
async start(): Promise<Handoff> {
this.preRunner();
await watchApp(this);
@@ -148,13 +148,9 @@ class Handoff {
}
async validateComponents(skipBuild?: boolean): Promise<Handoff> {
- let segmentToProcess = ComponentSegment.Validation;
- if (skipBuild) {
- segmentToProcess = ComponentSegment.ValidationOnly;
- }
this.preRunner();
if (!skipBuild) {
- await processComponents(this, undefined, segmentToProcess);
+ await processComponents(this, undefined, ComponentSegment.Validation);
}
return this;
}
@@ -207,16 +203,16 @@ class Handoff {
},
{
log: (msg: string): void => {
- console.log(msg);
+ Logger.log(msg);
},
err: (msg: string): void => {
- console.log(chalk.red(msg));
+ Logger.error(msg);
},
warn: (msg: string): void => {
- console.log(chalk.yellow(msg));
+ Logger.warn(msg);
},
success: (msg: string): void => {
- console.log(chalk.green(msg));
+ Logger.success(msg);
},
}
);
@@ -375,11 +371,6 @@ export const initRuntimeConfig = (handoff: Handoff): [runtimeConfig: RuntimeConf
//console.log('result.entries.scss', handoff.config.entries, path.resolve(handoff.workingPath, handoff.config.entries?.js));
if (!!handoff.config.entries?.js) {
result.entries.js = path.resolve(handoff.workingPath, handoff.config.entries?.js);
- } else {
- console.log(
- chalk.red('No js entry found in config'),
- handoff.debug ? `Path: ${path.resolve(handoff.workingPath, handoff.config.entries?.js)}` : ''
- );
}
if (handoff.config.entries?.components?.length) {
@@ -389,7 +380,7 @@ export const initRuntimeConfig = (handoff: Handoff): [runtimeConfig: RuntimeConf
const componentBaseName = path.basename(resolvedComponentPath);
const versions = getVersionsForComponent(resolvedComponentPath);
if (!versions.length) {
- console.warn(`No versions found for component at: ${resolvedComponentPath}`);
+ Logger.warn(`No versions found for component at: ${resolvedComponentPath}`);
continue;
}
@@ -402,7 +393,7 @@ export const initRuntimeConfig = (handoff: Handoff): [runtimeConfig: RuntimeConf
const configFileName = possibleConfigFiles.find((file) => fs.existsSync(path.resolve(resolvedComponentVersionPath, file)));
if (!configFileName) {
- console.warn(`Missing config: ${path.resolve(resolvedComponentVersionPath, possibleConfigFiles.join(' or '))}`);
+ Logger.warn(`Missing config: ${path.resolve(resolvedComponentVersionPath, possibleConfigFiles.join(' or '))}`);
continue;
}
@@ -422,7 +413,7 @@ export const initRuntimeConfig = (handoff: Handoff): [runtimeConfig: RuntimeConf
component = importedComponent.default || importedComponent;
}
} catch (err) {
- console.error(`Failed to read or parse config: ${resolvedComponentVersionConfigPath}`, err);
+ Logger.error(`Failed to read or parse config: ${resolvedComponentVersionConfigPath}`, err);
continue;
}
@@ -509,12 +500,12 @@ const validateConfig = (config: Config): Config => {
// TODO: Check to see if the exported folder exists before we run start
if (!config.figma_project_id && !process.env.HANDOFF_FIGMA_PROJECT_ID) {
// check to see if we can get this from the env
- console.error(chalk.red('Figma project id not found in config or env. Please run `handoff-app fetch` first.'));
+ Logger.error('Figma Project ID missing. Please set HANDOFF_FIGMA_PROJECT_ID or run "handoff-app fetch".');
throw new Error('Cannot initialize configuration');
}
if (!config.dev_access_token && !process.env.HANDOFF_DEV_ACCESS_TOKEN) {
// check to see if we can get this from the env
- console.error(chalk.red('Dev access token not found in config or env. Please run `handoff-app fetch` first.'));
+ Logger.error('Figma Access Token missing. Please set HANDOFF_DEV_ACCESS_TOKEN or run "handoff-app fetch".');
throw new Error('Cannot initialize configuration');
}
return config;
@@ -531,7 +522,7 @@ const getVersionsForComponent = (componentPath: string): string[] => {
if (semver.valid(versionDirectory)) {
versions.push(versionDirectory);
} else {
- console.error(`Invalid version directory ${versionDirectory}`);
+ Logger.error(`Invalid version directory ${versionDirectory}`);
}
}
}
diff --git a/src/pipeline.ts b/src/pipeline.ts
index 4b9c1c96..6155efdc 100644
--- a/src/pipeline.ts
+++ b/src/pipeline.ts
@@ -12,6 +12,7 @@ import generateChangelogRecord, { ChangelogRecord } from './changelog';
import { createDocumentationObject } from './documentation-object';
import { componentTransformer } from './transformers/preview/component';
import { FontFamily } from './types/font';
+import { Logger } from './utils/logger';
import { maskPrompt, prompt } from './utils/prompt';
/**
@@ -251,11 +252,9 @@ const validateHandoffRequirements = async (handoff: Handoff) => {
// couldn't find the right version, but ...
}
if (!requirements) {
- console.log(chalk.redBright('Handoff Installation failed'));
- console.log(
- chalk.yellow(
- '- Please update node to at least Node 16 https://nodejs.org/en/download. \n- You can read more about installing handoff at https://www.handoff.com/docs/'
- )
+ Logger.error('Handoff installation failed.');
+ Logger.warn(
+ '- Please update node to at least Node 16 https://nodejs.org/en/download. \n- You can read more about installing handoff at https://www.handoff.com/docs/'
);
throw new Error('Could not run handoff');
}
@@ -277,38 +276,28 @@ const validateFigmaAuth = async (handoff: Handoff): Promise => {
if (!DEV_ACCESS_TOKEN) {
missingEnvVars = true;
- console.log(
- chalk.yellow(`Figma developer access token not found. You can supply it as an environment variable or .env file at HANDOFF_DEV_ACCESS_TOKEN.
-Use these instructions to generate them ${chalk.blue(
- `https://help.figma.com/hc/en-us/articles/8085703771159-Manage-personal-access-tokens`
- )}\n`)
- );
+ Logger.warn(`Figma developer access token not found. You can supply it as an environment variable or .env file at HANDOFF_DEV_ACCESS_TOKEN.
+Use these instructions to generate them https://help.figma.com/hc/en-us/articles/8085703771159-Manage-personal-access-tokens\n`);
DEV_ACCESS_TOKEN = await maskPrompt(chalk.green('Figma Developer Key: '));
}
if (!FIGMA_PROJECT_ID) {
missingEnvVars = true;
- console.log(
- chalk.yellow(`\n\nFigma project id not found. You can supply it as an environment variable or .env file at HANDOFF_FIGMA_PROJECT_ID.
-You can find this by looking at the url of your Figma file. If the url is ${chalk.blue(
- `https://www.figma.com/file/IGYfyraLDa0BpVXkxHY2tE/Starter-%5BV2%5D`
- )}
-your id would be IGYfyraLDa0BpVXkxHY2tE\n`)
- );
+ Logger.warn(`\n\nFigma project id not found. You can supply it as an environment variable or .env file at HANDOFF_FIGMA_PROJECT_ID.
+You can find this by looking at the url of your Figma file. If the url is https://www.figma.com/file/IGYfyraLDa0BpVXkxHY2tE/Starter-%5BV2%5D
+your id would be IGYfyraLDa0BpVXkxHY2tE\n`);
FIGMA_PROJECT_ID = await maskPrompt(chalk.green('Figma Project Id: '));
}
if (missingEnvVars) {
- console.log(
- chalk.yellow(
- `\n\nYou supplied at least one required variable. We can write these variables to a local env file for you to make it easier to run the pipeline in the future.\n`
- )
+ Logger.warn(
+ `\n\nYou supplied at least one required variable. We can write these variables to a local env file for you to make it easier to run the pipeline in the future.\n`
);
const writeEnvFile = await prompt(chalk.green('Write environment variables to .env file? (y/n): '));
if (writeEnvFile !== 'y') {
- console.log(chalk.green(`Skipping .env file creation. You will need to supply these variables in the future.\n`));
+ Logger.success(`Skipped .env file creation. Please provide these variables manually.`);
} else {
const envFilePath = path.resolve(handoff.workingPath, '.env');
const envFileContent = `
@@ -324,21 +313,17 @@ HANDOFF_FIGMA_PROJECT_ID="${FIGMA_PROJECT_ID}"
if (fileExists) {
await fs.appendFile(envFilePath, envFileContent);
- console.log(
- chalk.green(
- `\nThe .env file was found and updated with new content. Since these are sensitive variables, please do not commit this file.\n`
- )
+ Logger.success(
+ `\nThe .env file was found and updated with new content. Since these are sensitive variables, please do not commit this file.\n`
);
} else {
await fs.writeFile(envFilePath, envFileContent.replace(/^\s*[\r\n]/gm, ''));
- console.log(
- chalk.green(
- `\nAn .env file was created in the root of your project. Since these are sensitive variables, please do not commit this file.\n`
- )
+ Logger.success(
+ `\nAn .env file was created in the root of your project. Since these are sensitive variables, please do not commit this file.\n`
);
}
} catch (error) {
- console.error(chalk.red('Error handling the .env file:', error));
+ Logger.error('Error handling the .env file:', error);
}
}
}
@@ -348,7 +333,7 @@ HANDOFF_FIGMA_PROJECT_ID="${FIGMA_PROJECT_ID}"
};
const figmaExtract = async (handoff: Handoff) => {
- console.log(chalk.green(`Starting Figma data extraction.`));
+ Logger.success(`Starting Figma data extraction.`);
let prevDocumentationObject = await handoff.getDocumentationObject();
let changelog: ChangelogRecord[] = (await readPrevJSONFile(handoff.getChangelogFilePath())) || [];
@@ -406,7 +391,7 @@ const pipeline = async (handoff: Handoff, build?: boolean) => {
if (!handoff.config) {
throw new Error('Handoff config not found');
}
- console.log(chalk.green(`Starting Handoff Figma data pipeline. Checking for environment and config.\n`));
+ Logger.success(`Starting Handoff Figma data pipeline. Checking for environment and config.`);
await validateHandoffRequirements(handoff);
await validateFigmaAuth(handoff);
const documentationObject = await figmaExtract(handoff);
diff --git a/src/transformers/docgen/index.ts b/src/transformers/docgen/index.ts
index a109e9e1..ceff0131 100644
--- a/src/transformers/docgen/index.ts
+++ b/src/transformers/docgen/index.ts
@@ -1,6 +1,7 @@
import fs from 'fs-extra';
import path from 'path';
import { withCustomConfig } from 'react-docgen-typescript';
+import { Logger } from '../../utils/logger';
import { DocgenParserConfig, DocgenResult } from '../types';
import { convertDocgenToProperties } from '../utils/schema';
@@ -20,7 +21,7 @@ export const generatePropertiesFromDocgen = async (
// Check if tsconfig exists
if (!fs.existsSync(tsconfigPath)) {
- console.warn(`TypeScript config not found at ${tsconfigPath}, using default configuration`);
+ Logger.warn(`TypeScript config not found at ${tsconfigPath}, using default configuration`);
}
const parserConfig: DocgenParserConfig = {
@@ -47,7 +48,7 @@ export const generatePropertiesFromDocgen = async (
return null;
} catch (error) {
- console.warn(`Failed to generate docs with react-docgen-typescript for ${entry}:`, error);
+ Logger.warn(`Failed to generate docs with react-docgen-typescript for ${entry}: ${error}`);
return null;
}
};
diff --git a/src/transformers/plugins/handlebars-previews.ts b/src/transformers/plugins/handlebars-previews.ts
index 358b2bff..a4a0f883 100644
--- a/src/transformers/plugins/handlebars-previews.ts
+++ b/src/transformers/plugins/handlebars-previews.ts
@@ -4,6 +4,8 @@ import { Types as CoreTypes } from 'handoff-core';
import path from 'path';
import { Plugin } from 'vite';
import Handoff from '../..';
+import { Logger } from '../../utils/logger';
+import { createViteLogger } from '../utils/vite-logger';
import { TransformComponentTokensResult } from '../preview/types';
import { createHandlebarsContext, registerHandlebarsHelpers } from '../utils/handlebars';
import { formatHtmlWithWrapper, trimPreview } from '../utils/html';
@@ -128,6 +130,9 @@ export function handlebarsPreviewsPlugin(
return {
name: PLUGIN_CONSTANTS.PLUGIN_NAME,
apply: 'build',
+ config: () => ({
+ customLogger: createViteLogger(),
+ }),
resolveId(resolveId) {
if (resolveId === PLUGIN_CONSTANTS.SCRIPT_ID) {
return resolveId;
@@ -155,34 +160,40 @@ export function handlebarsPreviewsPlugin(
// Generate previews for each variation
for (const previewKey in componentData.previews) {
- const previewData = componentData.previews[previewKey];
-
- // Render both normal and inspect modes
- const normalModeHtml = await renderHandlebarsTemplate(
- templateContent,
- componentData,
- previewData,
- false
- );
-
- const inspectModeHtml = await renderHandlebarsTemplate(
- templateContent,
- componentData,
- previewData,
- true
- );
-
- // Emit preview files
- emitPreviewFiles(
- componentId,
- previewKey,
- normalModeHtml,
- inspectModeHtml,
- (file) => this.emitFile(file)
- );
-
- generatedPreviews[previewKey] = normalModeHtml;
- componentData.previews[previewKey].url = `${componentId}-${previewKey}.html`;
+ try {
+ const previewData = componentData.previews[previewKey];
+
+ // Render both normal and inspect modes
+ const normalModeHtml = await renderHandlebarsTemplate(
+ templateContent,
+ componentData,
+ previewData,
+ false
+ );
+
+ const inspectModeHtml = await renderHandlebarsTemplate(
+ templateContent,
+ componentData,
+ previewData,
+ true
+ );
+
+ // Emit preview files
+ emitPreviewFiles(
+ componentId,
+ previewKey,
+ normalModeHtml,
+ inspectModeHtml,
+ (file) => this.emitFile(file)
+ );
+
+ generatedPreviews[previewKey] = normalModeHtml;
+ componentData.previews[previewKey].url = `${componentId}-${previewKey}.html`;
+
+ Logger.debug(`Generated Handlebars preview: ${componentId}-${previewKey}`);
+ } catch (err) {
+ Logger.error(`Failed to generate Handlebars preview for ${componentId}-${previewKey}`, err);
+ }
}
// Update component data with results
diff --git a/src/transformers/plugins/ssr-render.ts b/src/transformers/plugins/ssr-render.ts
index ccd036ee..ca597743 100644
--- a/src/transformers/plugins/ssr-render.ts
+++ b/src/transformers/plugins/ssr-render.ts
@@ -6,6 +6,7 @@ import React from 'react';
import ReactDOMServer from 'react-dom/server';
import { Plugin, normalizePath } from 'vite';
import Handoff from '../..';
+import { Logger } from '../../utils/logger';
import { generatePropertiesFromDocgen } from '../docgen';
import { SlotMetadata } from '../preview/component';
import { TransformComponentTokensResult } from '../preview/types';
@@ -14,6 +15,7 @@ import { formatHtml, trimPreview } from '../utils/html';
import { buildAndEvaluateModule } from '../utils/module';
import { loadSchemaFromComponent, loadSchemaFromFile } from '../utils/schema-loader';
import { slugify } from '../utils/string';
+import { createViteLogger } from '../utils/vite-logger';
/**
* React component type for SSR rendering
@@ -67,7 +69,7 @@ async function loadComponentSchemaAndModule(
properties = await generatePropertiesFromDocgen(componentPath, handoff);
}
} catch (error) {
- console.warn(`Failed to load component file ${componentPath}:`, error);
+ Logger.warn(`Failed to load component file "${componentPath}": ${error}`);
}
}
@@ -77,7 +79,7 @@ async function loadComponentSchemaAndModule(
const moduleExports = await buildAndEvaluateModule(componentPath, handoff);
component = moduleExports.exports.default;
} catch (error) {
- console.error(`Failed to load component for rendering: ${componentPath}`, error);
+ Logger.error(`Failed to load component for rendering "${componentPath}":`, error);
return [null, null];
}
}
@@ -152,8 +154,11 @@ export function ssrRenderPlugin(
return {
name: PLUGIN_CONSTANTS.PLUGIN_NAME,
apply: 'build',
+ config: () => ({
+ customLogger: createViteLogger(),
+ }),
resolveId(resolveId) {
- console.log('resolveId', resolveId);
+ Logger.debug('resolveId', resolveId);
if (resolveId === PLUGIN_CONSTANTS.SCRIPT_ID) {
return resolveId;
}
@@ -183,7 +188,7 @@ export function ssrRenderPlugin(
);
if (!ReactComponent) {
- console.error(`Failed to load React component for ${componentId}`);
+ Logger.error(`Failed to load React component for ${componentId}`);
return;
}
@@ -235,6 +240,7 @@ export function ssrRenderPlugin(
// Build client-side bundle
const clientBuildConfig = {
...DEFAULT_CLIENT_BUILD_CONFIG,
+ logLevel: 'silent' as const,
stdin: {
contents: clientHydrationSource,
resolveDir: process.cwd(),
@@ -248,8 +254,22 @@ export function ssrRenderPlugin(
? handoff.config.hooks.clientBuildConfig(clientBuildConfig)
: clientBuildConfig;
- const bundledClient = await esbuild.build(finalClientBuildConfig);
- const clientBundleJs = bundledClient.outputFiles[0].text;
+ let clientBundleJs: string;
+ try {
+ const bundledClient = await esbuild.build(finalClientBuildConfig);
+ if (bundledClient.warnings.length > 0) {
+ const messages = await esbuild.formatMessages(bundledClient.warnings, { kind: 'warning', color: true });
+ messages.forEach((msg) => Logger.warn(msg));
+ }
+ clientBundleJs = bundledClient.outputFiles[0].text;
+ } catch (error: any) {
+ Logger.error(`Failed to build client bundle for ${componentId}`);
+ if (error.errors) {
+ const messages = await esbuild.formatMessages(error.errors, { kind: 'error', color: true });
+ messages.forEach((msg) => Logger.error(msg));
+ }
+ continue;
+ }
// Generate complete HTML document
finalHtml = generateHtmlDocument(
diff --git a/src/transformers/preview/component.ts b/src/transformers/preview/component.ts
index 7ae9953e..e2f4ba1b 100644
--- a/src/transformers/preview/component.ts
+++ b/src/transformers/preview/component.ts
@@ -1,8 +1,8 @@
-import chalk from 'chalk';
import fs from 'fs-extra';
import path from 'path';
import sass from 'sass';
import Handoff from '../../index';
+import { Logger } from '../../utils/logger';
import writeComponentSummaryAPI, { getAPIPath } from './component/api';
import processComponents from './component/builder';
import { buildMainCss } from './component/css';
@@ -102,7 +102,7 @@ export async function processSharedStyles(handoff: Handoff): Promise => {
diff --git a/src/transformers/preview/component/api.ts b/src/transformers/preview/component/api.ts
--- a/src/transformers/preview/component/api.ts
+++ b/src/transformers/preview/component/api.ts
+/**
+ * Read a previously built component output (the {id}/{version}.json file)
+ * @returns The persisted component data or null if not found
+ */
+export const readComponentApi = async (handoff: Handoff, id: string, version: string): Promise<TransformComponentTokensResult | null> => {
+ const outputDirPath = path.resolve(getAPIPath(handoff), 'component', id);
+ const outputFilePath = path.resolve(outputDirPath, `${version}.json`);
+
+ if (fs.existsSync(outputFilePath)) {
+ try {
+ const existingJson = await fs.readFile(outputFilePath, 'utf8');
+ if (existingJson) {
+ return JSON.parse(existingJson) as TransformComponentTokensResult;
+ }
+ } catch (_) {
+ // Unable to parse existing file
+ }
+ }
+ return null;
+};
+
+/**
+ * Read the component metadata/summary (the {id}.json file)
+ * @param handoff
+ * @param id
+ * @returns The component summary or null if not found
+ */
+export const readComponentMetadataApi = async (handoff: Handoff, id: string): Promise<ComponentListObject | null> => {
+ const outputFilePath = path.resolve(getAPIPath(handoff), 'component', `${id}.json`);
+
+ if (fs.existsSync(outputFilePath)) {
+ try {
+ const existingJson = await fs.readFile(outputFilePath, 'utf8');
+ if (existingJson) {
+ return JSON.parse(existingJson) as ComponentListObject;
+ }
+ } catch (_) {
+ // Unable to parse existing file
+ }
+ }
+ return null;
+};
+
export default writeComponentSummaryAPI;
diff --git a/src/transformers/preview/component/builder.ts b/src/transformers/preview/component/builder.ts
index 4eb2040d..9dddaa3e 100644
--- a/src/transformers/preview/component/builder.ts
+++ b/src/transformers/preview/component/builder.ts
@@ -1,9 +1,23 @@
import { Types as CoreTypes } from 'handoff-core';
import cloneDeep from 'lodash/cloneDeep';
+import {
+ BuildCache,
+ checkOutputExists,
+ computeComponentFileStates,
+ computeGlobalDepsState,
+ createEmptyCache,
+ hasComponentChanged,
+ haveGlobalDepsChanged,
+ loadBuildCache,
+ pruneRemovedComponents,
+ saveBuildCache,
+ updateComponentCacheEntry,
+ type FileState,
+} from '../../../cache';
import Handoff from '../../../index';
+import { Logger } from '../../../utils/logger';
import { ensureIds } from '../../utils/schema';
import { ComponentListObject, ComponentType, TransformComponentTokensResult } from '../types';
-import { updateComponentSummaryApi, writeComponentApi, writeComponentMetadataApi } from './api';
+import { readComponentApi, readComponentMetadataApi, updateComponentSummaryApi, writeComponentApi, writeComponentMetadataApi } from './api';
import buildComponentCss from './css';
import buildPreviews from './html';
import buildComponentJs from './javascript';
@@ -46,42 +60,49 @@ export enum ComponentSegment {
Style = 'style',
Previews = 'previews',
Validation = 'validation',
- ValidationOnly = 'validation-only',
}
+type ComponentBuildPlan = {
+ js: boolean;
+ css: boolean;
+ previews: boolean;
+ validationMode: boolean;
+};
+
/**
- * Determines which keys should be preserved based on the segment being processed.
- * When processing a specific segment, we want to preserve data from other segments
- * to avoid overwriting them with undefined values.
+ * Returns a normalized build plan describing which component segments need rebuilding.
+ *
+ * The plan consolidates the conditional logic for:
+ * - Full builds (no segment specified) where every segment should be regenerated
+ * - Targeted rebuilds where only the requested segment runs
+ * - Validation sweeps that only rebuild segments with missing artifacts
+ *
+ * @param segmentToProcess Optional segment identifier coming from the caller
+ * @param existingData Previously persisted component output (if any)
*/
-function getPreserveKeysForSegment(segmentToProcess?: ComponentSegment): string[] {
- if (!segmentToProcess) {
- return []; // No preservation needed for full updates
- }
+const createComponentBuildPlan = (
+ segmentToProcess?: ComponentSegment,
+ existingData?: TransformComponentTokensResult
+): ComponentBuildPlan => {
+ const isValidationMode = segmentToProcess === ComponentSegment.Validation;
+ const isFullBuild = !segmentToProcess;
- switch (segmentToProcess) {
- case ComponentSegment.JavaScript:
- // When processing JavaScript segment, preserve CSS and previews data
- return ['css', 'sass', 'sharedStyles', 'previews', 'validations'];
-
- case ComponentSegment.Style:
- // When processing Style segment, preserve JavaScript and previews data
- return ['js', 'jsCompiled', 'previews', 'validations'];
-
- case ComponentSegment.Previews:
- // When processing Previews segment, preserve JavaScript and CSS data
- return ['js', 'jsCompiled', 'css', 'sass', 'sharedStyles', 'validations'];
-
- case ComponentSegment.Validation:
- // When processing Validation segment, preserve all other data
- return ['js', 'jsCompiled', 'css', 'sass', 'sharedStyles', 'previews'];
- case ComponentSegment.ValidationOnly:
- // When processing ValidationOnly segment, preserve only validation data
- return ['js', 'jsCompiled', 'css', 'sass', 'sharedStyles', 'previews'];
-
- default:
- return [];
- }
+ const previewsMissing = !existingData?.code || Object.values(existingData?.previews || {}).some((preview) => !preview?.url);
+
+ return {
+ js: isFullBuild || segmentToProcess === ComponentSegment.JavaScript || (isValidationMode && !existingData?.js),
+ css: isFullBuild || segmentToProcess === ComponentSegment.Style || (isValidationMode && !existingData?.css),
+ previews: isFullBuild || segmentToProcess === ComponentSegment.Previews || (isValidationMode && previewsMissing),
+ validationMode: isValidationMode,
+ };
+};
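+// For illustration: with a fully built component, a Style-only pass yields
+// { js: false, css: true, previews: false, validationMode: false }; a Validation pass over a
+// component whose CSS is missing (but whose js and previews exist) yields
+// { js: false, css: true, previews: false, validationMode: true }.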
+
+/**
+ * Options for processing components
+ */
+export interface ProcessComponentsOptions {
+ /** Enable caching to skip unchanged components */
+ useCache?: boolean;
}
/**
@@ -89,12 +110,14 @@ function getPreserveKeysForSegment(segmentToProcess?: ComponentSegment): string[
* @param handoff - The Handoff instance containing configuration and state
* @param id - Optional component ID to process a specific component
* @param segmentToProcess - Optional segment to update
+ * @param options - Optional processing options including cache settings
* @returns Promise resolving to an array of processed components
*/
export async function processComponents(
handoff: Handoff,
id?: string,
- segmentToProcess?: ComponentSegment
+ segmentToProcess?: ComponentSegment,
+ options?: ProcessComponentsOptions
): Promise<ComponentListObject[]> {
const result: ComponentListObject[] = [];
@@ -102,72 +125,250 @@ export async function processComponents(
const components = documentationObject?.components ?? ({} as CoreTypes.IDocumentationObject['components']);
const sharedStyles = await handoff.getSharedStyles();
const runtimeComponents = handoff.runtimeConfig?.entries?.components ?? {};
-
- // Determine which keys to preserve based on the segment being processed
- // This ensures that when processing only specific segments (e.g., JavaScript only),
- // we don't overwrite data from other segments (e.g., CSS, previews) with undefined values
- const preserveKeys = getPreserveKeysForSegment(segmentToProcess);
+ const allComponentIds = Object.keys(runtimeComponents);
+
+ // Determine which components need building based on cache (when enabled)
+ let componentsToBuild: Set<string>;
+ let cache: BuildCache | null = null;
+ let currentGlobalDeps = {};
+ const componentFileStatesMap: Map<string, Map<string, Record<string, FileState>>> = new Map();
- for (const runtimeComponentId of Object.keys(runtimeComponents)) {
+ // Only use caching when:
+ // - useCache option is enabled
+ // - No specific component ID is requested (full build scenario)
+ // - No specific segment is requested (full build scenario)
+ // - Force flag is not set
+ const shouldUseCache = options?.useCache && !id && !segmentToProcess && !handoff.force;
+
+ if (shouldUseCache) {
+ Logger.debug('Loading build cache...');
+ cache = await loadBuildCache(handoff);
+ currentGlobalDeps = await computeGlobalDepsState(handoff);
+ const globalDepsChanged = haveGlobalDepsChanged(cache?.globalDeps, currentGlobalDeps);
+
+ if (globalDepsChanged) {
+ Logger.info('Global dependencies changed, rebuilding all components');
+ componentsToBuild = new Set(allComponentIds);
+ } else {
+ Logger.debug('Global dependencies unchanged');
+ componentsToBuild = new Set();
+
+ // Evaluate each component independently
+ for (const componentId of allComponentIds) {
+ const versions = Object.keys(runtimeComponents[componentId]);
+ let needsBuild = false;
+
+ // Store file states for later cache update
+ const versionStatesMap = new Map<string, Record<string, FileState>>();
+ componentFileStatesMap.set(componentId, versionStatesMap);
+
+ for (const version of versions) {
+ const currentFileStates = await computeComponentFileStates(handoff, componentId, version);
+ versionStatesMap.set(version, currentFileStates);
+
+ const cachedEntry = cache?.components?.[componentId]?.[version];
+
+ if (!cachedEntry) {
+ Logger.info(`Component '${componentId}@${version}': new component, will build`);
+ needsBuild = true;
+ } else if (hasComponentChanged(cachedEntry, currentFileStates)) {
+ Logger.info(`Component '${componentId}@${version}': source files changed, will rebuild`);
+ needsBuild = true;
+ } else if (!(await checkOutputExists(handoff, componentId, version))) {
+ Logger.info(`Component '${componentId}@${version}': output missing, will rebuild`);
+ needsBuild = true;
+ }
+ }
+
+ if (needsBuild) {
+ componentsToBuild.add(componentId);
+ } else {
+ Logger.info(`Component '${componentId}': unchanged, skipping`);
+ }
+ }
+ }
+
+ // Prune removed components from cache
+ if (cache) {
+ pruneRemovedComponents(cache, allComponentIds);
+ }
+
+ const skippedCount = allComponentIds.length - componentsToBuild.size;
+ if (skippedCount > 0) {
+ Logger.info(`Building ${componentsToBuild.size} of ${allComponentIds.length} components (${skippedCount} unchanged)`);
+ } else if (componentsToBuild.size > 0) {
+ Logger.info(`Building all ${componentsToBuild.size} components`);
+ } else {
+ Logger.info('All components up to date, nothing to build');
+ }
+ } else {
+ // No caching - build all requested components
+ componentsToBuild = new Set(allComponentIds);
+ }
+
+ for (const runtimeComponentId of allComponentIds) {
+ // Skip if specific ID requested and doesn't match
if (!!id && runtimeComponentId !== id) {
continue;
}
+ // Skip if caching is enabled and this component doesn't need building
+ if (shouldUseCache && !componentsToBuild.has(runtimeComponentId)) {
+ // Even though we're skipping the build, we need to include this component's
+ // existing summary in the result to prevent data loss in components.json
+ const existingSummary = await readComponentMetadataApi(handoff, runtimeComponentId);
+ if (existingSummary) {
+ result.push(existingSummary);
+ }
+ continue;
+ }
+
const versions = Object.keys(runtimeComponents[runtimeComponentId]);
const latest = getLatestVersionForComponent(versions);
let latestVersion: TransformComponentTokensResult | undefined;
await Promise.all(
versions.map(async (version) => {
+ // Select the current component metadata from the runtime config for this id/version.
+ // Separate out `type` to enforce/rewrite it during build.
const runtimeComponent = runtimeComponents[runtimeComponentId][version];
const { type, ...restMetadata } = runtimeComponent;
+ // Attempt to load any existing persisted component output (previous build for this id/version).
+ // This is used for incremental/partial rebuilds to retain previously generated segments when not rebuilding all.
+ const existingData = await readComponentApi(handoff, runtimeComponentId, version);
+
+ // Compose the base in-memory data for building this component:
+ // - Start from a deep clone of the defaultComponent (to avoid mutation bugs)
+ // - Merge in metadata from the current runtime configuration (from config/docs)
+ // - Explicitly set `type` (defaults to Element if not provided)
let data: TransformComponentTokensResult = {
...cloneDeep(defaultComponent),
...restMetadata,
type: (type as ComponentType) || ComponentType.Element,
};
- if (!segmentToProcess || segmentToProcess === ComponentSegment.JavaScript || segmentToProcess === ComponentSegment.Validation) {
+ // buildPlan captures which segments need work for this run.
+ const buildPlan = createComponentBuildPlan(segmentToProcess, existingData);
+
+ /**
+ * Merge segment data from existing version if this segment is *not* being rebuilt.
+ * This ensures that when only one segment (e.g., Javascript, CSS, Previews) is being updated,
+ * other fields retain their previous values. This avoids unnecessary overwrites or data loss
+ * when doing segmented or partial builds.
+ */
+ if (existingData) {
+ // If we're not building JS, carry forward the previous JS output.
+ if (!buildPlan.js) {
+ data.js = existingData.js;
+ }
+ // If we're not building CSS/Sass, keep the earlier CSS and Sass outputs.
+ if (!buildPlan.css) {
+ data.css = existingData.css;
+ data.sass = existingData.sass;
+ }
+ // If we're not building previews, preserve pre-existing HTML, code snippet, and previews.
+ if (!buildPlan.previews) {
+ data.html = existingData.html;
+ data.code = existingData.code;
+ data.previews = existingData.previews;
+ }
+ /**
+ * Always keep validation results from the previous data,
+ * unless this run is specifically doing a validation update.
+ * This keeps validations current without unnecessary recomputation or accidental removal.
+ */
+ if (!buildPlan.validationMode) {
+ data.validations = existingData.validations;
+ }
+ }
+
+ // Build JS if needed (new build, validation missing, or explicit segment request).
+ if (buildPlan.js) {
data = await buildComponentJs(data, handoff);
}
- if (!segmentToProcess || segmentToProcess === ComponentSegment.Style || segmentToProcess === ComponentSegment.Validation) {
+ // Build CSS if needed.
+ if (buildPlan.css) {
data = await buildComponentCss(data, handoff, sharedStyles);
}
-
- if (!segmentToProcess || segmentToProcess === ComponentSegment.Previews || segmentToProcess === ComponentSegment.Validation) {
+ // Build previews (HTML, snapshots, etc) if needed.
+ if (buildPlan.previews) {
data = await buildPreviews(data, handoff, components);
}
- if (
- (segmentToProcess === ComponentSegment.Validation || segmentToProcess === ComponentSegment.ValidationOnly)
- && handoff.config?.hooks?.validateComponent) {
+ /**
+ * Run validation if explicitly requested and a hook is configured.
+ * This allows custom logic to assess the validity of the generated component data.
+ */
+ if (buildPlan.validationMode && handoff.config?.hooks?.validateComponent) {
const validationResults = await handoff.config.hooks.validateComponent(data);
data.validations = validationResults;
}
+ // Attach the resolved sharedStyles to the component data for persistence and downstream usage.
data.sharedStyles = sharedStyles;
- // recurse through all properties and ensure that every property has an id
+
+ // Ensure that every property within the properties array/object contains an 'id' field.
+ // This guarantees unique identification for property entries, which is useful for updates and API consumers.
data.properties = ensureIds(data.properties);
- await writeComponentApi(runtimeComponentId, data, version, handoff, preserveKeys);
+ // Write the updated component data to the corresponding API file (by component ID and version) for external access and caching.
+ await writeComponentApi(runtimeComponentId, data, version, handoff, []);
+ // Store the latest version's full data for potential summary writing after all versions are processed.
if (version === latest) {
latestVersion = data;
}
})
);
+ /**
+ * After processing all requested versions for this component:
+ * - If a latestVersion was produced, write a 'latest.json' API file for the component (points to the most recent/primary version).
+ * - Build a summary object for this component and write it to its summary API file.
+ * - Add the summary to the global result list for summary/index construction.
+ * If no version could be processed for this component, throw an error.
+ */
if (latestVersion) {
- await writeComponentApi(runtimeComponentId, latestVersion, 'latest', handoff, preserveKeys);
+ // Write the 'latest.json' snapshot for quick access to the most up-to-date version.
+ await writeComponentApi(runtimeComponentId, latestVersion, 'latest', handoff, []);
+ // Build the summary metadata for this component (includes all versions, properties, previews, etc).
const summary = buildComponentSummary(runtimeComponentId, latestVersion, versions);
+ // Store the summary as a per-component JSON file for documentation or API use.
await writeComponentMetadataApi(runtimeComponentId, summary, handoff);
+ // Add to the cumulative results, to later update the global components summary file.
result.push(summary);
+
+ // Update cache entries for this component after successful build
+ if (shouldUseCache) {
+ if (!cache) {
+ cache = createEmptyCache();
+ }
+ const versionStatesMap = componentFileStatesMap.get(runtimeComponentId);
+ if (versionStatesMap) {
+ for (const [version, fileStates] of Array.from(versionStatesMap)) {
+ updateComponentCacheEntry(cache, runtimeComponentId, version, fileStates);
+ }
+ } else {
+ // Compute file states if not already computed (e.g., when global deps changed)
+ for (const version of versions) {
+ const fileStates = await computeComponentFileStates(handoff, runtimeComponentId, version);
+ updateComponentCacheEntry(cache, runtimeComponentId, version, fileStates);
+ }
+ }
+ }
} else {
+ // Defensive: Throw a clear error if somehow no version was processed for this component.
throw new Error(`No latest version found for ${runtimeComponentId}`);
}
}
+ // Save the updated cache
+ if (shouldUseCache && cache) {
+ cache.globalDeps = currentGlobalDeps;
+ await saveBuildCache(handoff, cache);
+ }
+
// Always merge and write summary file, even if no components processed
const isFullRebuild = !id;
await updateComponentSummaryApi(handoff, result, isFullRebuild);
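With the extra parameter in place, cache-aware builds are opt-in. A hedged example of how a caller might drive it; the file location, the Handoff constructor arguments, and the 'button' component id are illustrative assumptions:

import Handoff from '../../../index';
import processComponents, { ComponentSegment } from './builder';

async function run() {
  const handoff = new Handoff(false, false);

  // Full build with caching: unchanged components are skipped, and their existing
  // summaries are re-read so components.json stays complete.
  await processComponents(handoff, undefined, undefined, { useCache: true });

  // Targeted rebuild of one component's styles; caching is bypassed because a
  // specific segment was requested.
  await processComponents(handoff, 'button', ComponentSegment.Style);
}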
diff --git a/src/transformers/preview/component/css.ts b/src/transformers/preview/component/css.ts
index 00840c02..f3e9746d 100644
--- a/src/transformers/preview/component/css.ts
+++ b/src/transformers/preview/component/css.ts
@@ -1,8 +1,8 @@
-import chalk from 'chalk';
import fs from 'fs-extra';
import path from 'path';
import { InlineConfig, build as viteBuild } from 'vite';
import Handoff, { initRuntimeConfig } from '../../../index';
+import { Logger } from '../../../utils/logger';
import viteBaseConfig from '../../config';
import { getComponentOutputPath } from '../component';
import { TransformComponentTokensResult } from '../types';
@@ -82,7 +82,7 @@ const buildCssBundle = async ({
await viteBuild(viteConfig);
} catch (e) {
- console.log(chalk.red(`Error building CSS for ${entry}`));
+ Logger.error(`Failed to build CSS for "${entry}"`);
throw e;
} finally {
// Restore the original NODE_ENV value
@@ -96,7 +96,7 @@ const buildCssBundle = async ({
const buildComponentCss = async (data: TransformComponentTokensResult, handoff: Handoff, sharedStyles: string) => {
const id = data.id;
- console.log('buildComponentCss ------------------------------', id);
+ Logger.debug(`buildComponentCss`, id);
const entry = data.entries?.scss;
if (!entry) {
return data;
@@ -158,7 +158,7 @@ const buildComponentCss = async (data: TransformComponentTokensResult, handoff:
}
}
} catch (e) {
- console.log(chalk.red(`Error building CSS for ${id}`));
+ Logger.error(`Failed to build CSS for "${id}"`);
throw e;
}
@@ -177,7 +177,7 @@ export const buildMainCss = async (handoff: Handoff): Promise => {
const entryPath = stat.isDirectory() ? path.resolve(runtimeConfig.entries.scss, 'main.scss') : runtimeConfig.entries.scss;
if (entryPath === runtimeConfig.entries.scss || fs.existsSync(entryPath)) {
- console.log(chalk.green(`Building main CSS file`));
+ Logger.success(`Building main CSS file...`);
try {
// Setup SASS load paths
@@ -199,8 +199,7 @@ export const buildMainCss = async (handoff: Handoff): Promise => {
handoff,
});
} catch (e) {
- console.log(chalk.red(`Error building main CSS`));
- console.log(e);
+ Logger.error(`Failed to build main CSS:`, e);
}
}
}
diff --git a/src/transformers/preview/component/html.ts b/src/transformers/preview/component/html.ts
index fa3eb7a0..64a05d3d 100644
--- a/src/transformers/preview/component/html.ts
+++ b/src/transformers/preview/component/html.ts
@@ -2,6 +2,7 @@ import react from '@vitejs/plugin-react';
import { Types as CoreTypes } from 'handoff-core';
import { InlineConfig, build as viteBuild } from 'vite';
import Handoff from '../../../index';
+import { Logger } from '../../../utils/logger';
import viteBaseConfig from '../../config';
import { handlebarsPreviewsPlugin, ssrRenderPlugin } from '../../plugins';
import { getComponentOutputPath } from '../component';
@@ -61,7 +62,7 @@ export const buildPreviews = async (
await viteBuild(viteConfig);
} catch (error) {
- console.error(`Error building component previews: ${data.entries.template}`, error);
+ Logger.error(`Error building component previews: ${data.entries.template}`, error);
} finally {
// Restore the original NODE_ENV value after vite build completes
// This prevents interference with Next.js app building/running processes
diff --git a/src/transformers/preview/component/javascript.ts b/src/transformers/preview/component/javascript.ts
index b347770b..1718917c 100644
--- a/src/transformers/preview/component/javascript.ts
+++ b/src/transformers/preview/component/javascript.ts
@@ -1,8 +1,8 @@
-import chalk from 'chalk';
import fs from 'fs-extra';
import path from 'path';
import { InlineConfig, build as viteBuild } from 'vite';
import Handoff, { initRuntimeConfig } from '../../../index';
+import { Logger } from '../../../utils/logger';
import viteBaseConfig from '../../config';
import { getComponentOutputPath } from '../component';
import { TransformComponentTokensResult } from '../types';
@@ -54,7 +54,7 @@ const buildJsBundle = async (
await viteBuild(viteConfig);
} catch (e) {
- console.error(chalk.red(`Error building ${outputFilename}`), e);
+ Logger.error(`Failed to build JS for "${outputFilename}":`, e);
} finally {
// Restore the original NODE_ENV value after vite build completes
// This prevents interference with Next.js app building/running processes
@@ -98,7 +98,7 @@ export const buildComponentJs = async (data: TransformComponentTokensResult, han
const compiled = await fs.readFile(path.resolve(outputPath, `${id}.js`), 'utf8');
data['jsCompiled'] = compiled;
} catch (e) {
- console.error(`[Component JS Build Error] ${id}:`, e);
+ Logger.error(`JS build failed for component "${id}":`, e);
}
return data;
diff --git a/src/transformers/preview/types.ts b/src/transformers/preview/types.ts
index 77f2d988..9c393ca0 100644
--- a/src/transformers/preview/types.ts
+++ b/src/transformers/preview/types.ts
@@ -105,8 +105,8 @@ export type ComponentObject = {
js?: string;
/** Optional path to the main SCSS/CSS file (if available) */
scss?: string;
- /** Optional path(s) to component template file(s) (if available) */
- templates?: string;
+ /** Optional path to component template file (if available) */
+ template?: string;
};
/** Schema describing the expected properties (props/slots) for the component */
properties: { [key: string]: SlotMetadata };
diff --git a/src/transformers/utils/handlebars.ts b/src/transformers/utils/handlebars.ts
index 3291a825..bfea0cd1 100644
--- a/src/transformers/utils/handlebars.ts
+++ b/src/transformers/utils/handlebars.ts
@@ -1,4 +1,5 @@
import Handlebars from 'handlebars';
+import { Logger } from '../../utils/logger';
import { SlotMetadata } from '../preview/component';
import { HandlebarsContext } from '../types';
@@ -15,7 +16,7 @@ export const registerHandlebarsHelpers = (
Handlebars.registerHelper('field', function (field: string, options: any) {
if (injectFieldWrappers) {
if (!field) {
- console.error(`Missing field declaration for ${data.id}`);
+ Logger.error(`Missing field declaration for ${data.id}`);
return options.fn(this);
}
@@ -29,7 +30,7 @@ export const registerHandlebarsHelpers = (
}
if (!current) {
- console.error(`Undefined field path for ${data.id}`);
+ Logger.error(`Undefined field path for ${data.id}`);
return options.fn(this);
}
diff --git a/src/transformers/utils/schema-loader.ts b/src/transformers/utils/schema-loader.ts
index e6dea063..87cda772 100644
--- a/src/transformers/utils/schema-loader.ts
+++ b/src/transformers/utils/schema-loader.ts
@@ -1,4 +1,5 @@
import path from 'path';
+import { Logger } from '../../utils/logger';
import { generatePropertiesFromDocgen } from '../docgen';
import { SlotMetadata } from '../preview/component';
import { buildAndEvaluateModule } from './module';
@@ -17,7 +18,7 @@ export const loadSchemaFromFile = async (
const ext = path.extname(schemaPath);
if (ext !== '.ts' && ext !== '.tsx') {
- console.warn(`Schema file has unsupported extension: ${ext}`);
+ Logger.warn(`Unsupported schema file extension: ${ext}`);
return null;
}
@@ -40,7 +41,7 @@ export const loadSchemaFromFile = async (
return null;
} catch (error) {
- console.warn(`Failed to load separate schema file ${schemaPath}:`, error);
+ Logger.warn(`Failed to load schema file "${schemaPath}": ${error}`);
return null;
}
};
diff --git a/src/transformers/utils/schema.ts b/src/transformers/utils/schema.ts
index 6f69a359..86497762 100644
--- a/src/transformers/utils/schema.ts
+++ b/src/transformers/utils/schema.ts
@@ -1,3 +1,4 @@
+import { Logger } from '../../utils/logger';
import { SlotMetadata, SlotType } from '../preview/component';
/**
@@ -93,7 +94,7 @@ export const loadSchemaFromExports = (
return schema;
} catch (error) {
- console.warn(`Failed to load schema from exports (${exportKey}):`, error);
+ Logger.warn(`Failed to load schema from exports (${exportKey}): ${error}`);
return null;
}
};
diff --git a/src/transformers/utils/vite-logger.ts b/src/transformers/utils/vite-logger.ts
new file mode 100644
index 00000000..cac04e22
--- /dev/null
+++ b/src/transformers/utils/vite-logger.ts
@@ -0,0 +1,33 @@
+import { LogOptions, Logger as ViteLogger } from 'vite';
+import { Logger } from '../../utils/logger';
+
+export const createViteLogger = (): ViteLogger => {
+ const warnedMessages = new Set();
+
+ return {
+ hasWarned: false,
+ info(msg: string, options?: LogOptions) {
+ Logger.info(msg);
+ },
+ warn(msg: string, options?: LogOptions) {
+ this.hasWarned = true;
+ Logger.warn(msg);
+ },
+ warnOnce(msg: string, options?: LogOptions) {
+ if (warnedMessages.has(msg)) return;
+ warnedMessages.add(msg);
+ this.hasWarned = true;
+ Logger.warn(msg);
+ },
+ error(msg: string, options?: LogOptions) {
+ Logger.error(msg);
+ },
+ clearScreen(type: string) {
+ // No-op to preserve terminal history
+ },
+ hasErrorLogged(error: Error) {
+ return false;
+ },
+ };
+};
+
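Both preview plugins above inject this logger through their `config` hooks; the same instance can also be handed to a one-off Vite build directly. A sketch, assuming a standard InlineConfig and an illustrative wrapper name:

import { build as viteBuild, InlineConfig } from 'vite';
import { createViteLogger } from './vite-logger';

export async function buildWithHandoffLogging(config: InlineConfig): Promise<void> {
  await viteBuild({
    ...config,
    // Route Vite's own info/warn/error output through the shared Handoff Logger.
    customLogger: createViteLogger(),
  });
}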
diff --git a/src/utils/filter.ts b/src/utils/filter.ts
index 8a71566d..25f49240 100644
--- a/src/utils/filter.ts
+++ b/src/utils/filter.ts
@@ -77,10 +77,6 @@ function evaluateFieldFilter(obj: Record, filter: FieldFilter): Fil
const { field, op, value } = filter;
const actual = obj[field];
- if (op === 'neq') {
- console.log('EVAL', filter, actual, actual !== value);
- }
-
switch (op) {
case 'eq':
return { matches: actual === value };
diff --git a/src/utils/logger.ts b/src/utils/logger.ts
new file mode 100644
index 00000000..f0e712fc
--- /dev/null
+++ b/src/utils/logger.ts
@@ -0,0 +1,49 @@
+import chalk from 'chalk';
+
+export class Logger {
+ private static debugMode = false;
+
+ static init(options?: { debug?: boolean }) {
+ if (options?.debug !== undefined) {
+ this.debugMode = options.debug;
+ }
+ }
+
+ private static getTimestamp(): string {
+ const now = new Date();
+ return chalk.gray(`[${now.toISOString()}]`);
+ }
+
+ static log(message: string) {
+ console.log(`${this.getTimestamp()} ${message}`);
+ }
+
+ static info(message: string) {
+ console.log(`${this.getTimestamp()} ${chalk.cyan(message)}`);
+ }
+
+ static success(message: string) {
+ console.log(`${this.getTimestamp()} ${chalk.green(message)}`);
+ }
+
+ static warn(message: string) {
+ console.warn(`${this.getTimestamp()} ${chalk.yellow(message)}`);
+ }
+
+ static error(message: string, error?: any) {
+ console.error(`${this.getTimestamp()} ${chalk.red(message)}`);
+ if (error) {
+ console.error(error);
+ }
+ }
+
+ static debug(message: string, data?: any) {
+ if (this.debugMode) {
+ console.log(`${this.getTimestamp()} ${chalk.gray(`[DEBUG] ${message}`)}`);
+ if (data) {
+ console.log(data);
+ }
+ }
+ }
+}
+
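For reference, a short usage sketch of the new Logger; the messages are illustrative, and debug output only appears when init was called with debug enabled (as the Handoff constructor now does):

import { Logger } from './logger';

Logger.init({ debug: true });

Logger.log('Plain message with timestamp');
Logger.info('Fetching Figma data...');
Logger.success('Build finished');
Logger.warn('No SCSS entry configured; skipping main CSS build');
Logger.debug('Raw API response', { status: 200 });

try {
  throw new Error('boom');
} catch (err) {
  Logger.error('Pipeline failed', err);
}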