From 351952d5b2a44c1e70c774ad9022641a35728338 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 1 Apr 2025 14:46:55 -0700 Subject: [PATCH 01/75] scaffolding token-analyzer package --- nx.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/nx.json b/nx.json index 8fe633f7d..09a6ebd2d 100644 --- a/nx.json +++ b/nx.json @@ -38,6 +38,11 @@ "options": { "packageRoot": "dist/packages/{projectName}" } + }, + "@nx/js:swc": { + "cache": true, + "dependsOn": ["^build"], + "inputs": ["production", "^production"] } }, "namedInputs": { From a1199bf14a40e2aae78b8a73c39f4ad73f190497 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 1 Apr 2025 17:18:39 -0700 Subject: [PATCH 02/75] adding logic changes --- packages/token-analyzer/eslint.config.cjs | 19 - packages/token-analyzer/package.json | 14 +- .../src/__tests__/analyzer.test.ts | 82 +++ .../src/__tests__/cssVarE2E.test.ts | 311 ++++++++++ .../src/__tests__/moduleResolver.test.ts | 205 +++++++ .../src/__tests__/packageImports.test.ts | 187 ++++++ .../src/__tests__/reexportTracking.test.ts | 177 ++++++ .../src/__tests__/sample-styles.ts | 53 ++ .../src/__tests__/typeCheckerImports.test.ts | 152 +++++ .../src/__tests__/verifyFileExists.test.ts | 104 ++++ packages/token-analyzer/src/astAnalyzer.ts | 541 +++++++++++++++++ .../src/cssVarTokenExtractor.ts | 90 +++ packages/token-analyzer/src/debugUtils.ts | 57 ++ packages/token-analyzer/src/fileOperations.ts | 70 +++ packages/token-analyzer/src/importAnalyzer.ts | 558 ++++++++++++++++++ packages/token-analyzer/src/index.ts | 131 +++- packages/token-analyzer/src/moduleResolver.ts | 204 +++++++ packages/token-analyzer/src/tokenUtils.ts | 94 +++ packages/token-analyzer/src/types.ts | 54 ++ 19 files changed, 3082 insertions(+), 21 deletions(-) delete mode 100644 packages/token-analyzer/eslint.config.cjs create mode 100644 packages/token-analyzer/src/__tests__/analyzer.test.ts create mode 100644 packages/token-analyzer/src/__tests__/cssVarE2E.test.ts create mode 100644 packages/token-analyzer/src/__tests__/moduleResolver.test.ts create mode 100644 packages/token-analyzer/src/__tests__/packageImports.test.ts create mode 100644 packages/token-analyzer/src/__tests__/reexportTracking.test.ts create mode 100644 packages/token-analyzer/src/__tests__/sample-styles.ts create mode 100644 packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts create mode 100644 packages/token-analyzer/src/__tests__/verifyFileExists.test.ts create mode 100644 packages/token-analyzer/src/astAnalyzer.ts create mode 100644 packages/token-analyzer/src/cssVarTokenExtractor.ts create mode 100644 packages/token-analyzer/src/debugUtils.ts create mode 100644 packages/token-analyzer/src/fileOperations.ts create mode 100644 packages/token-analyzer/src/importAnalyzer.ts create mode 100644 packages/token-analyzer/src/moduleResolver.ts create mode 100644 packages/token-analyzer/src/tokenUtils.ts create mode 100644 packages/token-analyzer/src/types.ts diff --git a/packages/token-analyzer/eslint.config.cjs b/packages/token-analyzer/eslint.config.cjs deleted file mode 100644 index 9d2af7a3d..000000000 --- a/packages/token-analyzer/eslint.config.cjs +++ /dev/null @@ -1,19 +0,0 @@ -const baseConfig = require('../../eslint.config.js'); - -module.exports = [ - ...baseConfig, - { - files: ['**/*.json'], - rules: { - '@nx/dependency-checks': [ - 'error', - { - ignoredFiles: ['{projectRoot}/eslint.config.{js,cjs,mjs}'], - }, - ], - }, - languageOptions: { - parser: require('jsonc-eslint-parser'), - }, - }, -]; diff --git 
a/packages/token-analyzer/package.json b/packages/token-analyzer/package.json index 1df9080d5..c6ac2f1af 100644 --- a/packages/token-analyzer/package.json +++ b/packages/token-analyzer/package.json @@ -3,6 +3,18 @@ "version": "0.0.1", "main": "./src/index.js", "types": "./src/index.d.ts", - "dependencies": {}, + "dependencies": { + "ts-morph": "24.0.1", + "typescript": "5.7.3", + "prettier": "2.8.8" + }, + "devDependencies": { + "@griffel/react": "^1.5.22" + }, + "scripts": { + "analyze-tokens": "NODE_OPTIONS=\"--loader ts-node/esm\" ts-node-esm src/index.ts", + "test": "jest", + "test:debug": "node --loader ts-node/esm --inspect-brk node_modules/.bin/jest --runInBand" + }, "private": true } diff --git a/packages/token-analyzer/src/__tests__/analyzer.test.ts b/packages/token-analyzer/src/__tests__/analyzer.test.ts new file mode 100644 index 000000000..1d6390842 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/analyzer.test.ts @@ -0,0 +1,82 @@ +import { Project } from 'ts-morph'; +import { analyzeFile } from '../astAnalyzer.js'; +import { sampleStyles } from './sample-styles.js'; +import * as path from 'path'; +import * as fs from 'fs/promises'; + +describe('Token Analyzer', () => { + let project: Project; + let tempFilePath: string; + + beforeAll(async () => { + // Create temp directory for test files + const tempDir = path.join(process.cwd(), 'temp-test-files'); + await fs.mkdir(tempDir, { recursive: true }); + tempFilePath = path.join(tempDir, 'test-styles.ts'); + await fs.writeFile(tempFilePath, sampleStyles); + + project = new Project({ + skipAddingFilesFromTsConfig: true, + skipFileDependencyResolution: false, + }); + }); + + afterAll(async () => { + // Cleanup temp files + const tempDir = path.join(process.cwd(), 'temp-test-files'); + await fs.rm(tempDir, { recursive: true, force: true }); + }); + + it('should analyze styles and find tokens', async () => { + const analysis = await analyzeFile(tempFilePath, project); + + // Verify the structure matches what we expect + expect(analysis).toHaveProperty('styles'); + expect(analysis).toHaveProperty('metadata'); + + const { styles, metadata } = analysis; + + // Verify root styles + expect(styles.useStyles.root.tokens).toContainEqual( + expect.objectContaining({ + property: 'color', + token: 'tokens.colorNeutralForeground1', + }), + ); + expect(styles.useStyles.root.tokens).toContainEqual( + expect.objectContaining({ + property: 'borderRightColor', + token: 'tokens.colorNeutralStrokeDisabled', + }), + ); + + // Verify anotherSlot styles + expect(styles.useStyles.anotherSlot.tokens).toContainEqual( + expect.objectContaining({ + property: 'color', + token: 'tokens.colorNeutralForeground2', + }), + ); + + // Verify focus function styles + expect(styles.useStyles.focusIndicator.tokens).toEqual([]); + const focusStyle = styles.useStyles.focusIndicator.nested?.[':focus']; + + expect(focusStyle?.tokens[0]).toEqual({ + path: [], + property: 'textDecorationColor', + token: 'tokens.colorStrokeFocus2', + }); + + // Verify metadata for conditional styles + expect(metadata.styleConditions['styles.large']).toEqual({ + conditions: ["size === 'large'"], + slotName: 'root', + }); + expect(metadata.styleConditions['styles.disabled']).toEqual({ + conditions: ['disabled'], + slotName: 'root', + }); + expect(metadata.styleConditions['styles.large'].conditions).toContain("size === 'large'"); + }); +}); diff --git a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts new file mode 100644 index 
000000000..73826e726 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts @@ -0,0 +1,311 @@ +// cssVarE2E.test.ts +import { Project } from 'ts-morph'; +import { analyzeFile } from '../astAnalyzer.js'; +import * as path from 'path'; +import * as fs from 'fs/promises'; + +// Test file contents +const cssVarsStyleFile = ` +import { makeStyles } from '@griffel/react'; +import { tokens } from '@fluentui/react-theme'; +import { colorPrimary, colorSecondary, nestedFallbackVar, complexCssVar } from './tokenVars'; + +const useStyles = makeStyles({ + // Direct token reference + direct: { + color: tokens.colorNeutralForeground1, + }, + // CSS variable with token + cssVar: { + color: \`var(--theme-color, \${tokens.colorBrandForeground4})\`, + }, + // Imported direct token + importedToken: { + color: colorPrimary, + }, + // Imported CSS variable with token + importedCssVar: { + color: colorSecondary, + }, + // Nested CSS variable with token + nestedCssVar: { + background: \`var(--primary, var(--secondary, \${tokens.colorBrandForeground2}))\`, + }, + // Imported nested CSS variable with token + importedNestedVar: { + color: nestedFallbackVar, + }, + // Imported complex CSS variable with multiple tokens + importedComplexVar: { + color: complexCssVar, + }, +}); +`; + +const tokenVarsFile = ` +import { tokens } from '@fluentui/react-theme'; +// Direct token exports +export const colorPrimary = tokens.colorBrandForeground6; +export const colorSecondary = \`var(--color, \${tokens.colorBrandForeground3})\`; + +// Nested fallback vars +export const nestedFallbackVar = \`var(--a, var(--b, \${tokens.colorNeutralForeground3}))\`; + +// Complex vars with multiple tokens +export const complexCssVar = \`var(--complex, var(--nested, \${tokens.colorBrandBackground})) var(--another, \${tokens.colorNeutralBackground1})\`; +`; + +describe('CSS Variable Token Extraction E2E', () => { + let project: Project; + let tempDir: string; + let stylesFilePath: string; + let varsFilePath: string; + + beforeAll(async () => { + // Create temp directory for test files + tempDir = path.join(__dirname, 'temp-e2e-test'); + await fs.mkdir(tempDir, { recursive: true }); + + // Create test files + stylesFilePath = path.join(tempDir, 'test.styles.ts'); + varsFilePath = path.join(tempDir, 'tokenVars.ts'); + + await fs.writeFile(stylesFilePath, cssVarsStyleFile); + await fs.writeFile(varsFilePath, tokenVarsFile); + + // Initialize project + project = new Project({ + tsConfigFilePath: path.join(tempDir, '../../../tsconfig.json'), + skipAddingFilesFromTsConfig: true, + }); + }); + + afterAll(async () => { + // Clean up temp files + await fs.rm(tempDir, { recursive: true, force: true }); + }); + + test('analyzes and extracts all token references from CSS variables', async () => { + // Run the analyzer on our test files + const analysis = await analyzeFile(stylesFilePath, project); + + // Verify the overall structure + expect(analysis).toHaveProperty('styles'); + expect(analysis).toHaveProperty('metadata'); + + const { styles } = analysis; + expect(styles).toHaveProperty('useStyles'); + + const useStyles = styles.useStyles; + + // 1. Verify direct token reference + expect(useStyles.direct.tokens.length).toBe(1); + expect(useStyles.direct.tokens).toContainEqual( + expect.objectContaining({ + property: 'color', + token: 'tokens.colorNeutralForeground1', + }), + ); + + // 2. 
Verify CSS variable with token + expect(useStyles.cssVar.tokens.length).toBe(1); + expect(useStyles.cssVar.tokens).toContainEqual( + expect.objectContaining({ + property: 'color', + token: 'tokens.colorBrandForeground4', + }), + ); + + // 3. Verify imported direct token + expect(useStyles.importedToken.tokens.length).toBe(1); + expect(useStyles.importedToken.tokens).toContainEqual( + expect.objectContaining({ + property: 'color', + token: 'tokens.colorBrandForeground6', + isVariableReference: true, + }), + ); + + // 4. Verify imported CSS variable with token + expect(useStyles.importedCssVar.tokens.length).toBe(1); + expect(useStyles.importedCssVar.tokens).toContainEqual( + expect.objectContaining({ + property: 'color', + token: 'tokens.colorBrandForeground3', + isVariableReference: true, + }), + ); + + // 5. Verify nested CSS variable with token + expect(useStyles.nestedCssVar.tokens.length).toBe(1); + expect(useStyles.nestedCssVar.tokens).toContainEqual( + expect.objectContaining({ + property: 'background', + token: 'tokens.colorBrandForeground2', + }), + ); + + // 6. Verify imported nested CSS variable with token + expect(useStyles.importedNestedVar.tokens.length).toBe(1); + expect(useStyles.importedNestedVar.tokens).toContainEqual( + expect.objectContaining({ + property: 'color', + token: 'tokens.colorNeutralForeground3', + isVariableReference: true, + }), + ); + + // 8. Verify imported complex CSS variable with multiple tokens + expect(useStyles.importedComplexVar.tokens.length).toBe(2); + expect(useStyles.importedComplexVar.tokens).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + token: 'tokens.colorBrandBackground', + isVariableReference: true, + }), + expect.objectContaining({ + token: 'tokens.colorNeutralBackground1', + isVariableReference: true, + }), + ]), + ); + }); +}); + +// This test focuses on the full end-to-end integration of the CSS variable extraction +// with the module resolution system +describe('CSS Variable Cross-Module Resolution E2E', () => { + let project: Project; + let tempDir: string; + + beforeAll(async () => { + // Create temp directory and subdirectories for test files + tempDir = path.join(__dirname, 'temp-cross-module-test'); + const varsDir = path.join(tempDir, 'tokens'); + const stylesDir = path.join(tempDir, 'styles'); + + await fs.mkdir(varsDir, { recursive: true }); + await fs.mkdir(stylesDir, { recursive: true }); + + // Create a deeper structure to test cross-module resolution + await fs.writeFile( + path.join(varsDir, 'colors.ts'), + ` + import { tokens } from '@fluentui/react-theme'; + // Base token definitions + export const primaryToken = tokens.colorBrandPrimary; + export const secondaryToken = tokens.colorBrandSecondary; + export const furtherMargin = tokens.spacingVerticalXXL; + `, + ); + + await fs.writeFile( + path.join(varsDir, 'variables.ts'), + ` + import { primaryToken, secondaryToken, furtherMargin } from './colors'; + import { tokens } from '@fluentui/react-theme'; + + // CSS Variables referencing tokens + export const primaryVar = \`var(--primary, \${tokens.colorBrandPrimary})\`; + export const nestedVar = \`var(--nested, var(--fallback, \${tokens.colorBrandSecondary}))\`; + export const multiTokenVar = \`var(--multi, \${primaryToken} \${tokens.colorBrandSecondary})\`; + export const someMargin = tokens.spacingHorizontalXXL; + export const someOtherMargin = furtherMargin; + `, + ); + + await fs.writeFile( + path.join(varsDir, 'index.ts'), + ` + // Re-export everything + export * from './colors'; + export * from 
'./variables'; + `, + ); + + await fs.writeFile( + path.join(stylesDir, 'component.styles.ts'), + ` + import { makeStyles } from '@griffel/react'; + import { primaryToken, primaryVar, nestedVar, multiTokenVar, someMargin, someOtherMargin } from '../tokens'; + + const useStyles = makeStyles({ + root: { + // Direct import + color: primaryToken, + // CSS var import + backgroundColor: primaryVar, + // Nested CSS var import + border: nestedVar, + // Complex var with multiple tokens + padding: multiTokenVar, + // aliased and imported CSS var + marginRight:someMargin, + // aliased and imported CSS var with another level of indirection + marginRight:someOtherMargin + } + }); + + export default useStyles; + `, + ); + + // Initialize project + project = new Project({ + tsConfigFilePath: path.join(tempDir, '../../../tsconfig.json'), + skipAddingFilesFromTsConfig: true, + }); + }); + + afterAll(async () => { + // Clean up temp files + await fs.rm(tempDir, { recursive: true, force: true }); + }); + + test('resolves token references across module boundaries with CSS vars', async () => { + // Run the analyzer on the component styles file + const componentPath = path.join(tempDir, 'styles', 'component.styles.ts'); + const analysis = await analyzeFile(componentPath, project); + + const { styles } = analysis; + expect(styles).toHaveProperty('useStyles'); + + const useStyles = styles.useStyles; + const rootStyle = useStyles.root; + + // Verify tokens were extracted from all import types + expect(rootStyle.tokens).toEqual( + expect.arrayContaining([ + // Direct import of token + expect.objectContaining({ + property: 'color', + token: 'tokens.colorBrandPrimary', + isVariableReference: true, + }), + // Import of CSS var with token + expect.objectContaining({ + property: 'backgroundColor', + token: 'tokens.colorBrandPrimary', + isVariableReference: true, + }), + // Import of nested CSS var with token + expect.objectContaining({ + property: 'border', + token: 'tokens.colorBrandSecondary', + isVariableReference: true, + }), + // Multiple tokens from a complex var + expect.objectContaining({ + property: 'padding', + token: 'tokens.colorBrandPrimary', + isVariableReference: true, + }), + expect.objectContaining({ + property: 'padding', + token: 'tokens.colorBrandSecondary', + isVariableReference: true, + }), + ]), + ); + }); +}); diff --git a/packages/token-analyzer/src/__tests__/moduleResolver.test.ts b/packages/token-analyzer/src/__tests__/moduleResolver.test.ts new file mode 100644 index 000000000..1fe1364e1 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/moduleResolver.test.ts @@ -0,0 +1,205 @@ +// moduleResolver.test.ts +import { ModuleResolutionKind, Project, ScriptTarget } from 'ts-morph'; +import { + resolveModulePath, + getModuleSourceFile, + clearModuleCache, + tsUtils, + modulePathCache, + resolvedFilesCache, +} from '../moduleResolver'; +import * as path from 'path'; +import * as fs from 'fs'; + +// Setup test directory and files +const TEST_DIR = path.join(__dirname, 'test-module-resolver'); + +beforeAll(() => { + if (!fs.existsSync(TEST_DIR)) { + fs.mkdirSync(TEST_DIR, { recursive: true }); + } + + // Create test files + fs.writeFileSync( + path.join(TEST_DIR, 'source.ts'), + ` + import { func } from './utils'; + import { theme } from './styles/theme'; + import defaultExport from './constants'; + + const x = func(); + `, + ); + + fs.writeFileSync( + path.join(TEST_DIR, 'utils.ts'), + ` + export const func = () => 'test'; + `, + ); + + fs.mkdirSync(path.join(TEST_DIR, 'styles'), { recursive: 
true }); + fs.writeFileSync( + path.join(TEST_DIR, 'styles/theme.ts'), + ` + export const theme = { + primary: 'tokens.colors.primary', + secondary: 'tokens.colors.secondary' + }; + `, + ); + + fs.writeFileSync( + path.join(TEST_DIR, 'constants.ts'), + ` + export default 'tokens.default.value'; + `, + ); + + // Create a file with extension in the import + fs.writeFileSync( + path.join(TEST_DIR, 'with-extension.ts'), + ` + import { func } from './utils.ts'; + `, + ); +}); + +afterAll(() => { + if (fs.existsSync(TEST_DIR)) { + fs.rmSync(TEST_DIR, { recursive: true, force: true }); + } +}); + +describe('Module resolver functions', () => { + let project: Project; + + beforeEach(() => { + // Create a fresh project for each test + project = new Project({ + compilerOptions: { + target: ScriptTarget.ES2020, + moduleResolution: ModuleResolutionKind.NodeNext, + }, + }); + + // Clear caches + clearModuleCache(); + }); + + describe('resolveModulePath', () => { + test('resolves relative path correctly', () => { + const sourceFilePath = path.join(TEST_DIR, 'source.ts'); + const result = resolveModulePath(project, './utils', sourceFilePath); + + expect(result).not.toBeNull(); + expect(result).toEqual(path.join(TEST_DIR, 'utils.ts')); + }); + + test('resolves nested relative path correctly', () => { + const sourceFilePath = path.join(TEST_DIR, 'source.ts'); + const result = resolveModulePath(project, './styles/theme', sourceFilePath); + + expect(result).not.toBeNull(); + expect(result).toEqual(path.join(TEST_DIR, 'styles/theme.ts')); + }); + + test('resolves path with file extension', () => { + const sourceFilePath = path.join(TEST_DIR, 'with-extension.ts'); + const result = resolveModulePath(project, './utils.ts', sourceFilePath); + + expect(result).not.toBeNull(); + expect(result).toEqual(path.join(TEST_DIR, 'utils.ts')); + }); + + test('returns null for non-existent module', () => { + const sourceFilePath = path.join(TEST_DIR, 'source.ts'); + const result = resolveModulePath(project, './non-existent', sourceFilePath); + + expect(result).toBeNull(); + }); + + test('caches resolution results', () => { + const sourceFilePath = path.join(TEST_DIR, 'source.ts'); + + // First call should resolve + const firstResult = resolveModulePath(project, './utils', sourceFilePath); + expect(firstResult).not.toBeNull(); + + // Mock the TS resolution to verify cache is used + const originalResolve = tsUtils.resolveModuleName; + tsUtils.resolveModuleName = jest.fn().mockImplementation(() => { + throw new Error('Should not be called if cache is working'); + }); + + // Second call should use cache + const secondResult = resolveModulePath(project, './utils', sourceFilePath); + expect(secondResult).toEqual(firstResult); + + // Restore original function + tsUtils.resolveModuleName = originalResolve; + }); + }); + + describe('getModuleSourceFile', () => { + test('returns source file for valid module', () => { + const sourceFilePath = path.join(TEST_DIR, 'source.ts'); + project.addSourceFileAtPath(sourceFilePath); + + const result = getModuleSourceFile(project, './utils', sourceFilePath); + + expect(result).not.toBeNull(); + expect(result?.getFilePath()).toEqual(path.join(TEST_DIR, 'utils.ts')); + }); + + test('caches source files', () => { + const sourceFilePath = path.join(TEST_DIR, 'source.ts'); + project.addSourceFileAtPath(sourceFilePath); + + // First call + const firstResult = getModuleSourceFile(project, './utils', sourceFilePath); + expect(firstResult).not.toBeNull(); + + // Mock project.addSourceFileAtPath to verify 
cache is used + const originalAddSourceFile = project.addSourceFileAtPath; + project.addSourceFileAtPath = jest.fn().mockImplementation(() => { + throw new Error('Should not be called if cache is working'); + }); + + // Second call should use cache + const secondResult = getModuleSourceFile(project, './utils', sourceFilePath); + expect(secondResult).toBe(firstResult); // Same instance + + // Restore original function + project.addSourceFileAtPath = originalAddSourceFile; + }); + + test('returns null for non-existent module', () => { + const sourceFilePath = path.join(TEST_DIR, 'source.ts'); + project.addSourceFileAtPath(sourceFilePath); + + const result = getModuleSourceFile(project, './non-existent', sourceFilePath); + expect(result).toBeNull(); + }); + }); + + test('clearModuleCache clears both caches', () => { + const sourceFilePath = path.join(TEST_DIR, 'source.ts'); + project.addSourceFileAtPath(sourceFilePath); + + // Fill the caches + getModuleSourceFile(project, './utils', sourceFilePath); + getModuleSourceFile(project, './styles/theme', sourceFilePath); + + // Verify caches were filled + expect(modulePathCache.size).toBeGreaterThan(0); + expect(resolvedFilesCache.size).toBeGreaterThan(0); + + // Clear caches + clearModuleCache(); + + // Directly verify caches are empty + expect(modulePathCache.size).toBe(0); + expect(resolvedFilesCache.size).toBe(0); + }); +}); diff --git a/packages/token-analyzer/src/__tests__/packageImports.test.ts b/packages/token-analyzer/src/__tests__/packageImports.test.ts new file mode 100644 index 000000000..5977f7b6b --- /dev/null +++ b/packages/token-analyzer/src/__tests__/packageImports.test.ts @@ -0,0 +1,187 @@ +// packageImports.test.ts +import { Project, ModuleResolutionKind, ScriptTarget } from 'ts-morph'; +import { resolveModulePath, clearModuleCache, tsUtils } from '../moduleResolver'; +import * as path from 'path'; +import * as fs from 'fs'; + +// Setup test directory and mock node_modules structure +const TEST_DIR = path.join(__dirname, 'test-package-imports'); +const NODE_MODULES = path.join(TEST_DIR, 'node_modules'); +const SCOPED_PACKAGE = path.join(NODE_MODULES, '@scope', 'package'); +const REGULAR_PACKAGE = path.join(NODE_MODULES, 'some-package'); + +beforeAll(() => { + // Create test directories + if (!fs.existsSync(TEST_DIR)) { + fs.mkdirSync(TEST_DIR, { recursive: true }); + } + if (!fs.existsSync(SCOPED_PACKAGE)) { + fs.mkdirSync(SCOPED_PACKAGE, { recursive: true }); + } + if (!fs.existsSync(REGULAR_PACKAGE)) { + fs.mkdirSync(REGULAR_PACKAGE, { recursive: true }); + } + + // Create a source file that imports from packages + fs.writeFileSync( + path.join(TEST_DIR, 'source.ts'), + ` + import { Component } from '@scope/package'; + import { helper } from 'some-package'; + `, + ); + + // Create package.json and index files for the scoped package + fs.writeFileSync( + path.join(SCOPED_PACKAGE, 'package.json'), + JSON.stringify({ + name: '@scope/package', + version: '1.0.0', + main: 'index.js', + }), + ); + fs.writeFileSync( + path.join(SCOPED_PACKAGE, 'index.js'), + ` + export const Component = { + theme: 'tokens.components.primary' + }; + `, + ); + + // Create package.json and index files for the regular package + fs.writeFileSync( + path.join(REGULAR_PACKAGE, 'package.json'), + JSON.stringify({ + name: 'some-package', + version: '1.0.0', + main: './lib/index.js', + }), + ); + + // Create lib directory in the regular package + fs.mkdirSync(path.join(REGULAR_PACKAGE, 'lib'), { recursive: true }); + + fs.writeFileSync( + 
path.join(REGULAR_PACKAGE, 'lib', 'index.js'), + ` + export const helper = 'tokens.helpers.main'; + `, + ); +}); + +afterAll(() => { + if (fs.existsSync(TEST_DIR)) { + fs.rmSync(TEST_DIR, { recursive: true, force: true }); + } +}); + +describe('Package imports resolution', () => { + let project: Project; + let originalResolve: any; + let originalFileExists: any; + + beforeEach(() => { + project = new Project({ + compilerOptions: { + target: ScriptTarget.ES2020, + moduleResolution: ModuleResolutionKind.NodeNext, + }, + }); + + // Setup workspace + project.addSourceFileAtPath(path.join(TEST_DIR, 'source.ts')); + + // Clear caches + clearModuleCache(); + + // Store original functions + originalResolve = tsUtils.resolveModuleName; + originalFileExists = tsUtils.fileExists; + + // Mock fileExists to handle our mock node_modules + tsUtils.fileExists = jest.fn().mockImplementation((filePath: string) => { + return fs.existsSync(filePath); + }); + }); + + afterEach(() => { + // Restore original functions + tsUtils.resolveModuleName = originalResolve; + tsUtils.fileExists = originalFileExists; + }); + + test('resolves scoped package imports correctly', () => { + const sourceFilePath = path.join(TEST_DIR, 'source.ts'); + + // Mock the TypeScript resolution for scoped packages + tsUtils.resolveModuleName = jest + .fn() + .mockImplementation((moduleName: string, containingFile: string, compilerOptions: any, host: any) => { + if (moduleName === '@scope/package') { + return { + resolvedModule: { + resolvedFileName: path.join(SCOPED_PACKAGE, 'index.js'), + extension: '.js', + isExternalLibraryImport: true, + }, + }; + } + // Call original for other cases + return originalResolve(moduleName, containingFile, compilerOptions, host); + }); + + const result = resolveModulePath(project, '@scope/package', sourceFilePath); + + expect(result).not.toBeNull(); + expect(result).toEqual(path.join(SCOPED_PACKAGE, 'index.js')); + expect(tsUtils.resolveModuleName).toHaveBeenCalled(); + }); + + test('resolves regular package imports with non-standard main path', () => { + const sourceFilePath = path.join(TEST_DIR, 'source.ts'); + + // Mock the TypeScript resolution for regular packages + tsUtils.resolveModuleName = jest + .fn() + .mockImplementation((moduleName: string, containingFile: string, compilerOptions: any, host: any) => { + if (moduleName === 'some-package') { + return { + resolvedModule: { + resolvedFileName: path.join(REGULAR_PACKAGE, 'lib', 'index.js'), + extension: '.js', + isExternalLibraryImport: true, + }, + }; + } + // Call original for other cases + return originalResolve(moduleName, containingFile, compilerOptions, host); + }); + + const result = resolveModulePath(project, 'some-package', sourceFilePath); + + expect(result).not.toBeNull(); + expect(result).toEqual(path.join(REGULAR_PACKAGE, 'lib', 'index.js')); + expect(tsUtils.resolveModuleName).toHaveBeenCalled(); + }); + + test('returns null for non-existent packages', () => { + const sourceFilePath = path.join(TEST_DIR, 'source.ts'); + + // Mock the TypeScript resolution to return null for non-existent packages + tsUtils.resolveModuleName = jest + .fn() + .mockImplementation((moduleName: string, containingFile: string, compilerOptions: any, host: any) => { + if (moduleName === 'non-existent-package') { + return { resolvedModule: undefined }; + } + // Call original for other cases + return originalResolve(moduleName, containingFile, compilerOptions, host); + }); + + const result = resolveModulePath(project, 'non-existent-package', sourceFilePath); + 
+ expect(result).toBeNull(); + expect(tsUtils.resolveModuleName).toHaveBeenCalled(); + }); +}); diff --git a/packages/token-analyzer/src/__tests__/reexportTracking.test.ts b/packages/token-analyzer/src/__tests__/reexportTracking.test.ts new file mode 100644 index 000000000..338deae74 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/reexportTracking.test.ts @@ -0,0 +1,177 @@ +// reexportTracking.test.ts +import { Project } from 'ts-morph'; +import { analyzeImports, ImportedValue } from '../importAnalyzer'; +import * as path from 'path'; +import * as fs from 'fs'; + +// Setup test directory with a chain of re-exports +const TEST_DIR = path.join(__dirname, 'test-reexports'); + +beforeAll(() => { + if (!fs.existsSync(TEST_DIR)) { + fs.mkdirSync(TEST_DIR, { recursive: true }); + } + + // Create a main file that imports from an index + fs.writeFileSync( + path.join(TEST_DIR, 'main.ts'), + ` + import { Component, AliasedValue, Utils, DirectValue } from './index'; + import DefaultExport from './index'; + + const styles = { + component: Component, + alias: AliasedValue, + utils: Utils, + direct: DirectValue, + default: DefaultExport + }; + `, + ); + + // Create an index file that re-exports everything + fs.writeFileSync( + path.join(TEST_DIR, 'index.ts'), + ` + // Re-export from components + export { Component } from './components'; + + // Re-export with alias + export { Value as AliasedValue } from './values'; + + // Re-export all from utils + export * from './utils'; + + // Direct export + export const DirectValue = 'tokens.direct.value'; + + // Re-export default + export { default } from './defaults'; + `, + ); + + // Create a components file + fs.writeFileSync( + path.join(TEST_DIR, 'components.ts'), + ` + export const Component = 'tokens.components.primary'; + `, + ); + + // Create a values file + fs.writeFileSync( + path.join(TEST_DIR, 'values.ts'), + ` + export const Value = 'tokens.values.standard'; + `, + ); + + // Create a utils file + fs.writeFileSync( + path.join(TEST_DIR, 'utils.ts'), + ` + export const Utils = 'tokens.utils.helper'; + `, + ); + + // Create a defaults file + fs.writeFileSync( + path.join(TEST_DIR, 'defaults.ts'), + ` + const DefaultValue = 'tokens.defaults.main'; + export default DefaultValue; + `, + ); +}); + +afterAll(() => { + if (fs.existsSync(TEST_DIR)) { + fs.rmSync(TEST_DIR, { recursive: true, force: true }); + } +}); + +describe('Re-export tracking', () => { + let project: Project; + + beforeEach(() => { + // Create a project using the existing directory structure + // This makes it easier to test without needing to override compiler options + project = new Project({ + tsConfigFilePath: path.join(TEST_DIR, '../../../tsconfig.json'), + skipAddingFilesFromTsConfig: true, + }); + + // Create a minimal tsconfig.json + fs.writeFileSync( + path.join(TEST_DIR, 'tsconfig.json'), + JSON.stringify({ + compilerOptions: { + target: 'es2020', + moduleResolution: 'node', + esModuleInterop: true, + skipLibCheck: true, + }, + }), + ); + }); + + test('follows standard re-export chain', async () => { + const mainFile = path.join(TEST_DIR, 'main.ts'); + const sourceFile = project.addSourceFileAtPath(mainFile); + + const importedValues: Map = await analyzeImports(sourceFile, project); + + // Check that Component was correctly resolved from components.ts + expect(importedValues.has('Component')).toBe(true); + expect(importedValues.get('Component')?.value).toBe('tokens.components.primary'); + expect(importedValues.get('Component')?.sourceFile).toContain('components.ts'); + 
}); + + test('follows aliased re-export chain', async () => { + const mainFile = path.join(TEST_DIR, 'main.ts'); + const sourceFile = project.addSourceFileAtPath(mainFile); + + const importedValues: Map = await analyzeImports(sourceFile, project); + + // Check that AliasedValue was correctly resolved from values.ts + expect(importedValues.has('AliasedValue')).toBe(true); + expect(importedValues.get('AliasedValue')?.value).toBe('tokens.values.standard'); + expect(importedValues.get('AliasedValue')?.sourceFile).toContain('values.ts'); + }); + + test('follows namespace re-export', async () => { + const mainFile = path.join(TEST_DIR, 'main.ts'); + const sourceFile = project.addSourceFileAtPath(mainFile); + + const importedValues: Map = await analyzeImports(sourceFile, project); + + // Check that Utils from namespace export was correctly resolved + expect(importedValues.has('Utils')).toBe(true); + expect(importedValues.get('Utils')?.value).toBe('tokens.utils.helper'); + expect(importedValues.get('Utils')?.sourceFile).toContain('utils.ts'); + }); + + test('handles direct exports in the same file', async () => { + const mainFile = path.join(TEST_DIR, 'main.ts'); + const sourceFile = project.addSourceFileAtPath(mainFile); + + const importedValues: Map = await analyzeImports(sourceFile, project); + + // Check that DirectValue was correctly resolved from index.ts + expect(importedValues.has('DirectValue')).toBe(true); + expect(importedValues.get('DirectValue')?.value).toBe('tokens.direct.value'); + expect(importedValues.get('DirectValue')?.sourceFile).toContain('index.ts'); + }); + + test('follows default export chain', async () => { + const mainFile = path.join(TEST_DIR, 'main.ts'); + const sourceFile = project.addSourceFileAtPath(mainFile); + + const importedValues: Map = await analyzeImports(sourceFile, project); + + // Check that DefaultExport was correctly resolved from defaults.ts + expect(importedValues.has('DefaultExport')).toBe(true); + expect(importedValues.get('DefaultExport')?.value).toBe('tokens.defaults.main'); + expect(importedValues.get('DefaultExport')?.sourceFile).toContain('defaults.ts'); + }); +}); diff --git a/packages/token-analyzer/src/__tests__/sample-styles.ts b/packages/token-analyzer/src/__tests__/sample-styles.ts new file mode 100644 index 000000000..8deebab2f --- /dev/null +++ b/packages/token-analyzer/src/__tests__/sample-styles.ts @@ -0,0 +1,53 @@ +export const sampleStyles = ` +import { makeStyles, mergeClasses } from '@griffel/react'; +import { tokens } from '@fluentui/react-theme'; +import { createCustomFocusIndicatorStyle } from '@fluentui/react-tabster'; + +const useStyles = makeStyles({ + focusIndicator: createCustomFocusIndicatorStyle({ + textDecorationColor: tokens.colorStrokeFocus2, + }), + root: { + color: tokens.colorNeutralForeground1, + backgroundColor: tokens.colorNeutralBackground1, + ...shorthands.borderColor(tokens.colorNeutralStrokeDisabled), + ':hover': { + color: tokens.colorNeutralForegroundHover, + } + }, + large: { + fontSize: tokens.fontSizeBase600, + }, + disabled: { + color: tokens.colorNeutralForegroundDisabled, + }, + anotherSlot: { + color: tokens.colorNeutralForeground2, + } +}); + +export const Component = () => { + const styles = useStyles(); + + const state = {root:{}, anotherSlot: {}} + + state.root.className = mergeClasses( + styles.root, + styles.focusIndicator, + size === 'large' && styles.large, + disabled && styles.disabled, + state.root.className + ); + + state.anotherSlot.className = mergeClasses( + styles.anotherSlot, + 
state.anotherSlot.className + ); + + return ( +    <div className={state.root.className}>
+      <span className={state.anotherSlot.className} />
+    </div>
+ ); +}; +`; diff --git a/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts b/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts new file mode 100644 index 000000000..890ae2621 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts @@ -0,0 +1,152 @@ +// typeCheckerImports.test.ts +import { Project } from 'ts-morph'; +import { analyzeImports, ImportedValue } from '../importAnalyzer'; +import * as path from 'path'; +import * as fs from 'fs'; + +// Setup test directory with a chain of re-exports +const TEST_DIR = path.join(__dirname, 'test-type-checker'); + +beforeAll(() => { + if (!fs.existsSync(TEST_DIR)) { + fs.mkdirSync(TEST_DIR, { recursive: true }); + } + + // Create a main file that imports from an index + fs.writeFileSync( + path.join(TEST_DIR, 'main.ts'), + ` + import { Component, AliasedValue, Utils, DirectValue } from './index'; + import DefaultExport from './index'; + + const styles = { + component: Component, + alias: AliasedValue, + utils: Utils, + direct: DirectValue, + default: DefaultExport + }; + `, + ); + + // Create an index file that re-exports everything + fs.writeFileSync( + path.join(TEST_DIR, 'index.ts'), + ` + // Re-export from components + export { Component } from './components'; + + // Re-export with alias + export { Value as AliasedValue } from './values'; + + // Re-export all from utils + export * from './utils'; + + // Direct export + export const DirectValue = 'tokens.direct.value'; + + // Re-export default + export { default } from './defaults'; + `, + ); + + // Create a components file + fs.writeFileSync( + path.join(TEST_DIR, 'components.ts'), + ` + export const Component = 'tokens.components.primary'; + `, + ); + + // Create a values file + fs.writeFileSync( + path.join(TEST_DIR, 'values.ts'), + ` + export const Value = 'tokens.values.standard'; + `, + ); + + // Create a utils file + fs.writeFileSync( + path.join(TEST_DIR, 'utils.ts'), + ` + export const Utils = 'tokens.utils.helper'; + `, + ); + + // Create a defaults file + fs.writeFileSync( + path.join(TEST_DIR, 'defaults.ts'), + ` + const DefaultValue = 'tokens.defaults.main'; + export default DefaultValue; + `, + ); +}); + +afterAll(() => { + if (fs.existsSync(TEST_DIR)) { + fs.rmSync(TEST_DIR, { recursive: true, force: true }); + } +}); + +describe('Type Checker Import Analysis', () => { + let project: Project; + + beforeEach(() => { + // Create a project using the existing directory structure + // This makes it easier to test without needing to override compiler options + project = new Project({ + tsConfigFilePath: path.join(TEST_DIR, '../../../tsconfig.json'), + skipAddingFilesFromTsConfig: true, + }); + + // Create a minimal tsconfig.json + fs.writeFileSync( + path.join(TEST_DIR, 'tsconfig.json'), + JSON.stringify({ + compilerOptions: { + target: 'es2020', + moduleResolution: 'node', + esModuleInterop: true, + skipLibCheck: true, + }, + }), + ); + }); + + test('follows all re-export types using type checker', async () => { + const mainFile = path.join(TEST_DIR, 'main.ts'); + const sourceFile = project.addSourceFileAtPath(mainFile); + + // Add all other files to ensure project has complete type information + project.addSourceFilesAtPaths([path.join(TEST_DIR, '**/*.ts')]); + + const importedValues: Map = await analyzeImports(sourceFile, project); + + // Verify standard re-export (Component) + expect(importedValues.has('Component')).toBe(true); + expect(importedValues.get('Component')?.value).toBe('tokens.components.primary'); + 
expect(importedValues.get('Component')?.sourceFile).toContain('components.ts'); + + // Verify aliased re-export (AliasedValue) + expect(importedValues.has('AliasedValue')).toBe(true); + expect(importedValues.get('AliasedValue')?.value).toBe('tokens.values.standard'); + expect(importedValues.get('AliasedValue')?.sourceFile).toContain('values.ts'); + + // Verify namespace re-export (Utils) + expect(importedValues.has('Utils')).toBe(true); + expect(importedValues.get('Utils')?.value).toBe('tokens.utils.helper'); + expect(importedValues.get('Utils')?.sourceFile).toContain('utils.ts'); + + // Verify direct export (DirectValue) + expect(importedValues.has('DirectValue')).toBe(true); + expect(importedValues.get('DirectValue')?.value).toBe('tokens.direct.value'); + expect(importedValues.get('DirectValue')?.sourceFile).toContain('index.ts'); + + // Verify default export (DefaultExport) + expect(importedValues.has('DefaultExport')).toBe(true); + expect(importedValues.get('DefaultExport')?.value).toBe('tokens.defaults.main'); + expect(importedValues.get('DefaultExport')?.sourceFile).toContain('defaults.ts'); + }); +}); diff --git a/packages/token-analyzer/src/__tests__/verifyFileExists.test.ts b/packages/token-analyzer/src/__tests__/verifyFileExists.test.ts new file mode 100644 index 000000000..a9f5937da --- /dev/null +++ b/packages/token-analyzer/src/__tests__/verifyFileExists.test.ts @@ -0,0 +1,104 @@ +// verifyFileExists.test.ts +import * as path from 'path'; +import * as fs from 'fs'; +import { tsUtils, verifyFileExists } from '../moduleResolver'; + +// Setup test directory and files +const TEST_DIR = path.join(__dirname, 'test-verify-files'); +const EXISTING_FILE = path.join(TEST_DIR, 'exists.txt'); +const NON_EXISTENT_FILE = path.join(TEST_DIR, 'does-not-exist.txt'); + +beforeAll(() => { + if (!fs.existsSync(TEST_DIR)) { + fs.mkdirSync(TEST_DIR, { recursive: true }); + } + + // Create a file we know exists + fs.writeFileSync(EXISTING_FILE, 'This file exists'); + + // Make sure our non-existent file really doesn't exist + if (fs.existsSync(NON_EXISTENT_FILE)) { + fs.unlinkSync(NON_EXISTENT_FILE); + } +}); + +afterAll(() => { + if (fs.existsSync(TEST_DIR)) { + fs.rmSync(TEST_DIR, { recursive: true, force: true }); + } +}); + +describe('verifyFileExists', () => { + // Store original functions to restore after tests + const originalFileExists = tsUtils.fileExists; + + afterEach(() => { + // Restore original functions after each test + tsUtils.fileExists = originalFileExists; + }); + + test('returns true for existing files', () => { + expect(verifyFileExists(EXISTING_FILE)).toBe(true); + }); + + test('returns false for non-existent files', () => { + expect(verifyFileExists(NON_EXISTENT_FILE)).toBe(false); + }); + + test('returns false for null or undefined paths', () => { + expect(verifyFileExists(null)).toBe(false); + expect(verifyFileExists(undefined)).toBe(false); + }); + + test('uses tsUtils.fileExists when available', () => { + // Mock the tsUtils.fileExists function + tsUtils.fileExists = jest.fn().mockImplementation(filePath => { + return filePath === EXISTING_FILE; + }); + + expect(verifyFileExists(EXISTING_FILE)).toBe(true); + expect(verifyFileExists(NON_EXISTENT_FILE)).toBe(false); + expect(tsUtils.fileExists).toHaveBeenCalledTimes(2); + }); + + test('falls back to fs.existsSync when tsUtils.fileExists throws', () => { + // Mock tsUtils.fileExists to throw an error + tsUtils.fileExists = jest.fn().mockImplementation(() => { + throw new Error('fileExists not available'); + }); + + // Spy on 
fs.existsSync + const existsSyncSpy = jest.spyOn(fs, 'existsSync'); + + // Test should still work using fs.existsSync + expect(verifyFileExists(EXISTING_FILE)).toBe(true); + expect(verifyFileExists(NON_EXISTENT_FILE)).toBe(false); + + // Verify tsUtils.fileExists was called and threw an error + expect(tsUtils.fileExists).toHaveBeenCalledTimes(2); + + // Verify fs.existsSync was used as fallback + expect(existsSyncSpy).toHaveBeenCalledTimes(2); + + // Restore the original spy + existsSyncSpy.mockRestore(); + }); + + test('returns false when both fileExists mechanisms fail', () => { + // Mock tsUtils.fileExists to throw + tsUtils.fileExists = jest.fn().mockImplementation(() => { + throw new Error('fileExists not available'); + }); + + // Mock fs.existsSync to throw + const existsSyncSpy = jest.spyOn(fs, 'existsSync').mockImplementation(() => { + throw new Error('existsSync not available'); + }); + + // Should safely return false when everything fails + expect(verifyFileExists(EXISTING_FILE)).toBe(false); + + // Restore the original spy + existsSyncSpy.mockRestore(); + }); +}); diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts new file mode 100644 index 000000000..3b02ce229 --- /dev/null +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -0,0 +1,541 @@ +/* eslint-disable no-console */ +import { Project, Node, SourceFile, PropertyAssignment, SpreadAssignment } from 'ts-morph'; +import { + TokenReference, + StyleAnalysis, + FileAnalysis, + StyleCondition, + StyleContent, + StyleMetadata, + TOKEN_REGEX, + StyleTokens, +} from './types.js'; +import { log, measure, measureAsync } from './debugUtils.js'; +import { analyzeImports, processImportedStringTokens, ImportedValue } from './importAnalyzer.js'; +import { extractTokensFromCssVars } from './cssVarTokenExtractor.js'; +import { extractTokensFromText, getPropertiesForShorthand, isTokenReference } from './tokenUtils.js'; + +const makeResetStylesToken = 'resetStyles'; + +interface StyleMapping { + baseStyles: string[]; + conditionalStyles: StyleCondition[]; + slotName?: string; +} + +interface VariableMapping { + variableName: string; + functionName: string; +} + +/** + * Process a style property to extract token references. + * Property names are derived from the actual CSS property in the path, + * not the object key containing them. + * + * @param prop The property assignment or spread element to process + * @param importedValues Map of imported values for resolving token references + * @param isResetStyles Whether this is a reset styles property + */ +function processStyleProperty( + prop: PropertyAssignment | SpreadAssignment, + importedValues: Map | undefined = undefined, + isResetStyles?: Boolean, +): TokenReference[] { + const tokens: TokenReference[] = []; + const parentName = Node.isPropertyAssignment(prop) ? 
prop.getName() : ''; + + function processNode(node?: Node, path: string[] = []): void { + if (!node) { + return; + } + + // If we're processing a reset style, we need to add the parent name to the path + if (isResetStyles && path.length === 0 && parentName) { + path.push(parentName); + } + + // Check for string literals or template expressions (string template literals) + if (Node.isStringLiteral(node) || Node.isTemplateExpression(node)) { + const text = node.getText().replace(/['"]/g, ''); // Remove quotes + + // Check for CSS var() syntax that might contain tokens + if (text.includes('var(')) { + const cssVarTokens = extractTokensFromCssVars(text, path[path.length - 1] || parentName, path, TOKEN_REGEX); + tokens.push(...cssVarTokens); + } else { + // Check for direct token references + const matches = extractTokensFromText(node); + if (matches.length > 0) { + matches.forEach(match => { + tokens.push({ + property: path[path.length - 1] || parentName, + token: match, + path, + }); + }); + } + } + } else if (Node.isIdentifier(node)) { + const text = node.getText(); + + // First check if it matches the token regex directly + const matches = extractTokensFromText(node); + if (matches.length > 0) { + matches.forEach(match => { + tokens.push({ + property: path[path.length - 1] || parentName, + token: match, + path, + }); + }); + } + + // Then check if it's an imported value reference + if (importedValues && importedValues.has(text)) { + const importTokens = processImportedStringTokens( + importedValues, + path[path.length - 1] || parentName, + text, + path, + TOKEN_REGEX, + ); + tokens.push(...importTokens); + } + } else if (Node.isPropertyAccessExpression(node)) { + const text = node.getText(); + const isToken = isTokenReference(text); + if (isToken) { + tokens.push({ + property: path[path.length - 1] || parentName, + token: text, + path, + }); + } + } else if (Node.isObjectLiteralExpression(node)) { + node.getProperties().forEach(childProp => { + if (Node.isPropertyAssignment(childProp)) { + const childName = childProp.getName(); + processNode(childProp.getInitializer(), [...path, childName]); + } else if (Node.isSpreadAssignment(childProp)) { + // Handle spread elements in object literals + processNode(childProp.getExpression(), path); + } + }); + } else if (Node.isSpreadAssignment(node)) { + // Handle spread elements + processNode(node.getExpression(), path); + } else if (Node.isCallExpression(node) && node.getExpression().getText() === 'createCustomFocusIndicatorStyle') { + const focus = `:focus`; + const focusWithin = `:focus-within`; + let nestedModifier = focus; + + const passedTokens = node.getArguments()[0]; + const passedOptions = node.getArguments()[1]; + + if (passedOptions && Node.isObjectLiteralExpression(passedOptions)) { + passedOptions.getProperties().forEach(property => { + if (Node.isPropertyAssignment(property)) { + const optionName = property.getName(); + if (optionName === 'selector') { + const selectorType = property.getInitializer()?.getText(); + if (selectorType === 'focus') { + nestedModifier = focus; + } else if (selectorType === 'focus-within') { + nestedModifier = focusWithin; + } + } + } + }); + } + + if (passedTokens && Node.isObjectLiteralExpression(passedTokens)) { + passedTokens.getProperties().forEach(property => { + if (Node.isPropertyAssignment(property)) { + const childName = property.getName(); + processNode(property.getInitializer(), [...path, nestedModifier, childName]); + } + }); + } + } else if (Node.isCallExpression(node)) { + // Process calls like 
shorthands.borderColor(tokens.color) + const functionName = node.getExpression().getText(); + // we should pass the number of arguments so we can properly map which overload is being called. + const affectedProperties = getPropertiesForShorthand(functionName); + + if (affectedProperties.length > 0) { + // Process each argument and apply it to all affected properties + node.getArguments().forEach(argument => { + processNodeForAffectedProperties(argument, affectedProperties, path); + }); + } else { + // Generic handling of functions that are not whitelisted + node.getArguments().forEach(argument => { + if (Node.isObjectLiteralExpression(argument)) { + argument.getProperties().forEach(property => { + if (Node.isPropertyAssignment(property)) { + const childName = property.getName(); + processNode(property.getInitializer(), [...path, functionName, childName]); + } + }); + } + // Check for string literals in function arguments that might contain CSS variables with tokens + if (Node.isStringLiteral(argument)) { + const text = argument.getText().replace(/['"]/g, ''); + if (text.includes('var(')) { + const cssVarTokens = extractTokensFromCssVars( + text, + path[path.length - 1] || parentName, + [...path, functionName], + TOKEN_REGEX, + ); + tokens.push(...cssVarTokens); + } + } + }); + } + } + } + + // Helper function to process nodes for multiple affected properties + function processNodeForAffectedProperties(node: Node, properties: string[], basePath: string[]): void { + if (!node) { + return; + } + + // If this is a direct token reference + if (Node.isPropertyAccessExpression(node) && isTokenReference(node)) { + properties.forEach(property => { + tokens.push({ + property, + token: node.getText(), + path: basePath.concat(property), + }); + }); + return; + } + + // If this is an identifier that might be a variable + if (Node.isIdentifier(node) && importedValues && importedValues.has(node.getText())) { + properties.forEach(property => { + const importTokens = processImportedStringTokens( + importedValues, + property, + node.getText(), + basePath, + TOKEN_REGEX, + ); + tokens.push(...importTokens); + }); + return; + } + + // For other node types, process them normally but with each property + if (Node.isStringLiteral(node) || Node.isTemplateExpression(node)) { + const text = node.getText().replace(/['"]/g, ''); + + // Check for tokens in the text + const matches = extractTokensFromText(node); + if (matches.length > 0) { + properties.forEach(property => { + matches.forEach(match => { + tokens.push({ + property, + token: match, + path: basePath, + }); + }); + }); + } + + // Check for CSS vars + if (text.includes('var(')) { + properties.forEach(property => { + const cssVarTokens = extractTokensFromCssVars(text, property, basePath, TOKEN_REGEX); + tokens.push(...cssVarTokens); + }); + } + } + + // For any other complex expressions, process them normally + else { + processNode(node, basePath); + } + } + + if (Node.isPropertyAssignment(prop)) { + const initializer = prop.getInitializer(); + if (initializer) { + processNode(initializer); + } + } else if (Node.isSpreadAssignment(prop)) { + processNode(prop.getExpression()); + } + + return tokens; +} + +/** + * Analyzes mergeClasses calls to determine style relationships + */ +function analyzeMergeClasses(sourceFile: SourceFile): StyleMapping[] { + const mappings: StyleMapping[] = []; + + sourceFile.forEachDescendant(node => { + if (Node.isCallExpression(node) && node.getExpression().getText() === 'mergeClasses') { + const parentNode = node.getParent(); + 
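+ // The mergeClasses call is usually the right-hand side of an assignment such as state.root.className = mergeClasses(...),
+ // so when the parent is a binary expression the slot name ("root", "anotherSlot", ...) can be read from the left-hand side.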
let slotName = ''; + if (Node.isBinaryExpression(parentNode)) { + slotName = parentNode.getLeft().getText().split('.')[1]; + } + const mapping: StyleMapping = { + baseStyles: [], + conditionalStyles: [], + slotName, + }; + + /** + * TODO: We could also walk the tree to find what function is assigned to our makeStyles call, and thus, what + * styles object we're working with. Typically this is called `useStyles` and then assigned to `styles`. We've hard + * coded it for now but this could be improved. + */ + + node.getArguments().forEach(arg => { + // Handle direct style references + if (Node.isPropertyAccessExpression(arg)) { + mapping.baseStyles.push(arg.getText()); + } + // Handle conditional styles + else if (Node.isBinaryExpression(arg)) { + const right = arg.getRight(); + if (Node.isPropertyAccessExpression(right)) { + mapping.conditionalStyles.push({ + style: right.getText(), + condition: arg.getLeft().getText(), + }); + } + } else if (!arg.getText().includes('.')) { + // We found a single variable (makeResetStyles or other assignment), add to base styles for lookup later + mapping.baseStyles.push(arg.getText()); + } + }); + + if (mapping.baseStyles.length || mapping.conditionalStyles.length) { + mappings.push(mapping); + } + } + }); + + return mappings; +} + +/** + * Creates a StyleContent object from token references. + * + * The path structure in token references is relative to the style property being processed. + * For example, given a style object: + * ```typescript + * { + * root: { // Handled by analyzeMakeStyles + * color: token, // path = ['color'] + * ':hover': { // Start of nested structure + * color: token // path = [':hover', 'color'] + * } + * } + * } + * ``` + * Property names reflect the actual CSS property, derived from the path. + */ +function createStyleContent(tokens: TokenReference[]): StyleContent { + const content: StyleContent = { + tokens: tokens.filter(t => { + return t.path.length === 1; + }), + }; + + // Nested structures have paths longer than 1 + const nestedTokens = tokens.filter(t => t.path.length > 1); + if (nestedTokens.length > 0) { + content.nested = nestedTokens.reduce((acc, token) => { + const nestedKey = token.path[0]; + + if (!acc[nestedKey]) { + acc[nestedKey] = { tokens: [] }; + } + + acc[nestedKey].tokens.push({ + ...token, + path: [], // Reset path as we've used it for nesting + }); + + return acc; + }, {}); + } + + return content; +} + +/** + * Creates metadata from style mappings + */ +function createMetadata(styleMappings: StyleMapping[]): StyleMetadata { + const metadata: StyleMetadata = { + styleConditions: {}, + }; + + styleMappings.forEach(mapping => { + mapping.baseStyles.forEach(style => { + if (metadata.styleConditions[style]) { + metadata.styleConditions[style].isBase = true; + } else { + metadata.styleConditions[style] = { isBase: true, slotName: mapping.slotName || '' }; + } + }); + + mapping.conditionalStyles.forEach(({ style, condition }) => { + if (metadata.styleConditions[style]) { + metadata.styleConditions[style].conditions = metadata.styleConditions[style].conditions || []; + if (condition) { + metadata.styleConditions[style].conditions!.push(condition); + } + } else { + metadata.styleConditions[style] = { + conditions: condition ? 
[condition] : [], + slotName: mapping.slotName || '', + }; + } + }); + }); + + return metadata; +} + +/** + * Analyzes makeStyles calls to get token usage and structure + */ +async function analyzeMakeStyles( + sourceFile: SourceFile, + importedValues: Map | undefined = undefined, +): Promise { + const analysis: StyleAnalysis = {}; + + sourceFile.forEachDescendant(node => { + if (Node.isCallExpression(node) && node.getExpression().getText() === 'makeStyles') { + const stylesArg = node.getArguments()[0]; + const parentNode = node.getParent(); + if (Node.isObjectLiteralExpression(stylesArg) && Node.isVariableDeclaration(parentNode)) { + // Process the styles object + stylesArg.getProperties().forEach(prop => { + if (Node.isPropertyAssignment(prop)) { + const styleName = prop.getName(); + const tokens = processStyleProperty(prop, importedValues); + const functionName = parentNode.getName(); + if (!analysis[functionName]) { + analysis[functionName] = {}; + } + if (tokens.length) { + analysis[functionName][styleName] = createStyleContent(tokens); + } + } + }); + } + } else if (Node.isCallExpression(node) && node.getExpression().getText() === 'makeResetStyles') { + // Similar to above, but the styles are stored under the assigned function name instead of local variable + const stylesArg = node.getArguments()[0]; + const parentNode = node.getParent(); + if (Node.isVariableDeclaration(parentNode)) { + const functionName = parentNode.getName(); + if (!analysis[functionName]) { + analysis[functionName] = {}; + } + // We store 'isResetStyles' to differentiate from makeStyles and link mergeClasses variables + analysis[functionName][makeResetStylesToken] = { + tokens: [], + nested: {}, + isResetStyles: true, + }; + if (Node.isObjectLiteralExpression(stylesArg)) { + // Process the styles object + stylesArg.getProperties().forEach(prop => { + if (Node.isPropertyAssignment(prop)) { + const tokens = processStyleProperty(prop, importedValues, true); + if (tokens.length) { + const styleContent = createStyleContent(tokens); + analysis[functionName][makeResetStylesToken].tokens = analysis[functionName][ + makeResetStylesToken + ].tokens.concat(styleContent.tokens); + analysis[functionName][makeResetStylesToken].nested = { + ...analysis[functionName][makeResetStylesToken].nested, + ...styleContent.nested, + }; + } + } + }); + } + } + } + }); + + const variables: VariableMapping[] = []; + const styleFunctionNames: string[] = Object.keys(analysis); + + sourceFile.forEachDescendant(node => { + // We do a second parse to link known style functions (i.e. makeResetStyles assigned function variable names). + // This is necessary to handle cases where we're using a variable directly in mergeClasses to link styles. 
+ + if (Node.isCallExpression(node) && styleFunctionNames.includes(node.getExpression().getText())) { + const parentNode = node.getParent(); + const functionName = node.getExpression().getText(); + if (Node.isVariableDeclaration(parentNode)) { + const variableName = parentNode.getName(); + const variableMap: VariableMapping = { + functionName, + variableName, + }; + variables.push(variableMap); + } + } + }); + + // Store our makeResetStyles assigned variables in the analysis to link later + variables.forEach(variable => { + Object.keys(analysis[variable.functionName]).forEach(styleName => { + if (analysis[variable.functionName][styleName].assignedVariables === undefined) { + analysis[variable.functionName][styleName].assignedVariables = []; + } + analysis[variable.functionName][styleName].assignedVariables?.push(variable.variableName); + }); + }); + + return analysis; +} + +/** + * Combines mergeClasses and makeStyles analysis, with import resolution + */ +async function analyzeFile(filePath: string, project: Project): Promise { + log(`Analyzing ${filePath}`); + + const sourceFile = project.addSourceFileAtPath(filePath); + + // First analyze imports to find imported string values + log('Analyzing imports to find imported token values'); + const importedValues = await measureAsync('analyze imports', () => analyzeImports(sourceFile, project)); + + // Second pass: Analyze mergeClasses + const styleMappings = measure('analyze mergeClasses', () => analyzeMergeClasses(sourceFile)); + + // Third pass: Analyze makeStyles with imported values + const styleAnalysis = await measureAsync('analyze makeStyles', () => + analyzeMakeStyles(sourceFile, importedValues), + ); + + // Create enhanced analysis with separated styles and metadata + return { + styles: styleAnalysis, + metadata: createMetadata(styleMappings), + }; +} + +export { analyzeFile, processStyleProperty, analyzeMergeClasses, analyzeMakeStyles, createStyleContent }; +export type { StyleMapping }; diff --git a/packages/token-analyzer/src/cssVarTokenExtractor.ts b/packages/token-analyzer/src/cssVarTokenExtractor.ts new file mode 100644 index 000000000..ed8753e92 --- /dev/null +++ b/packages/token-analyzer/src/cssVarTokenExtractor.ts @@ -0,0 +1,90 @@ +// cssVarTokenExtractor.ts +import { log } from './debugUtils.js'; +import { TokenReference } from './types.js'; +import { extractTokensFromText } from './tokenUtils.js'; + +/** + * Extracts token references from CSS variable syntax including nested fallback chains + * Example: var(--some-token, var(--fallback, var(${tokens.someToken}))) + * + * @param value The CSS variable string to process + * @param propertyName The CSS property name this value is assigned to + * @param path The path in the style object + * @param TOKEN_REGEX The regex pattern to match token references + * @returns Array of token references found in the string + */ +export function extractTokensFromCssVars( + value: string, + propertyName: string, + path: string[] = [], + TOKEN_REGEX: RegExp, +): TokenReference[] { + const tokens: TokenReference[] = []; + + let testValue = value; + + // Direct token matches in the string + const directMatches = extractTokensFromText(testValue); + if (directMatches.length > 0) { + directMatches.forEach(match => { + testValue = testValue.replace(match, ''); // Remove direct matches from the string + tokens.push({ + property: propertyName, + token: match, + path, + }); + }); + } + + // we have an issue with duplicated calls. 
A direct match will match the whole string as would a token within a var part + // found by the regex, so we need to remove the direct matches from the string + + // Look for CSS var() patterns + const varPattern = /var\s*\(\s*([^,)]*),?\s*(.*?)\s*\)/g; + let match: RegExpExecArray | null; + + while ((match = varPattern.exec(testValue)) !== null) { + const fullMatch = match[0]; // The entire var(...) expression + const varName = match[1]; // The CSS variable name + const fallback = match[2]; // The fallback value, which might contain nested var() calls + + log(`Processing CSS var: ${fullMatch}`); + log(` - Variable name: ${varName}`); + log(` - Fallback: ${fallback}`); + + // Check if the variable name contains a token reference + const varNameTokens = extractTokensFromText(varName); + if (varNameTokens.length > 0) { + varNameTokens.forEach(token => { + tokens.push({ + property: propertyName, + token, + path, + }); + }); + } + + // If there's a fallback value, it might contain tokens or nested var() calls + if (fallback) { + // Recursively process the fallback + if (fallback.includes('var(')) { + const fallbackTokens = extractTokensFromCssVars(fallback, propertyName, path, TOKEN_REGEX); + tokens.push(...fallbackTokens); + } else { + // Check for direct token references in the fallback + const fallbackTokens = extractTokensFromText(fallback); + if (fallbackTokens.length > 0) { + fallbackTokens.forEach(token => { + tokens.push({ + property: propertyName, + token, + path, + }); + }); + } + } + } + } + + return tokens; +} diff --git a/packages/token-analyzer/src/debugUtils.ts b/packages/token-analyzer/src/debugUtils.ts new file mode 100644 index 000000000..1b04cf824 --- /dev/null +++ b/packages/token-analyzer/src/debugUtils.ts @@ -0,0 +1,57 @@ +/* eslint-disable no-console */ +import { performance } from 'perf_hooks'; + +interface DebugConfig { + debug: boolean; + perf: boolean; +} + +let config: DebugConfig = { + debug: false, + perf: false, +}; + +export const configure = (options: Partial): void => { + config = { ...config, ...options }; +}; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export const log = (message: string, ...args: any[]): void => { + if (config.debug) { + console.log(`DEBUG: ${message}`, ...args); + } +}; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export const error = (message: string, errorArg: any): void => { + // Always log errors, but with debug info if enabled + const prefix = config.debug ? 
'DEBUG ERROR: ' : 'ERROR: '; + console.error(`${prefix}${message}`, errorArg); +}; + +export const measureAsync = async (name: string, fn: () => Promise): Promise => { + if (!config.perf) { + return fn(); + } + + const start = performance.now(); + return fn().finally(() => { + const end = performance.now(); + console.log(`PERF: ${name} took ${(end - start).toFixed(2)}ms`); + }); +}; + +export const measure = (name: string, fn: () => T): T => { + if (!config.perf) { + return fn(); + } + + const start = performance.now(); + const result = fn(); + const end = performance.now(); + console.log(`PERF: ${name} took ${(end - start).toFixed(2)}ms`); + return result; +}; + +// Re-export types that consumers might need +export type { DebugConfig }; diff --git a/packages/token-analyzer/src/fileOperations.ts b/packages/token-analyzer/src/fileOperations.ts new file mode 100644 index 000000000..8d4c31ce3 --- /dev/null +++ b/packages/token-analyzer/src/fileOperations.ts @@ -0,0 +1,70 @@ +import { promises as fs } from 'fs'; +import { join, resolve, dirname } from 'path'; +import { IGNORED_DIRS, VALID_EXTENSIONS } from './types.js'; + +/** + * Recursively finds all style-related files in a directory + * @param dir The root directory to start searching from + * @returns Array of absolute file paths + */ +export async function findStyleFiles(dir: string): Promise { + const styleFiles: string[] = []; + + async function scan(directory: string): Promise { + const entries = await fs.readdir(directory, { withFileTypes: true }); + + const scanPromises = entries.map(async entry => { + const fullPath = join(directory, entry.name); + + if (entry.isDirectory() && !IGNORED_DIRS.includes(entry.name)) { + await scan(fullPath); + } else if ( + (entry.name.includes('style') || entry.name.includes('styles')) && + VALID_EXTENSIONS.some(ext => entry.name.endsWith(ext)) + ) { + styleFiles.push(fullPath); + } + }); + + await Promise.all(scanPromises); + } + + await scan(dir); + return styleFiles; +} + +/** + * Resolves a relative import path to an absolute file path + * @param importPath The import path from the source file + * @param currentFilePath The path of the file containing the import + * @returns Resolved absolute path or null if not found + */ +export async function resolveImportPath(importPath: string, currentFilePath: string): Promise { + if (!importPath.startsWith('.')) { + return null; + } + + const dir = dirname(currentFilePath); + const absolutePath = resolve(dir, importPath); + + // Try original path + try { + const stats = await fs.stat(absolutePath); + if (stats.isFile()) { + return absolutePath; + } + } catch {} // Ignore errors and try extensions + + // Try with extensions + for (const ext of VALID_EXTENSIONS) { + const pathWithExt = absolutePath + ext; + try { + const stats = await fs.stat(pathWithExt); + if (stats.isFile()) { + return pathWithExt; + } + } catch {} // Ignore errors and continue trying + } + + return null; +} diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts new file mode 100644 index 000000000..bd76e6e9a --- /dev/null +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -0,0 +1,558 @@ +// importAnalyzer.ts +import { Project, Node, SourceFile, ImportDeclaration, Symbol, TypeChecker, SyntaxKind } from 'ts-morph'; +import { log } from './debugUtils.js'; +import { TokenReference, TOKEN_REGEX } from './types.js'; +import { getModuleSourceFile } from './moduleResolver.js'; +import { extractTokensFromCssVars } from 
'./cssVarTokenExtractor.js'; +import { isTokenReference, extractTokensFromText } from './tokenUtils.js'; + +/** + * Represents a portion of a template expression + */ +interface TemplateSpan { + text: string; // The actual text content + isToken: boolean; // Whether this span is a token reference + isReference: boolean; // Whether this span is a reference to another variable + referenceName?: string; // The name of the referenced variable if isReference is true +} + +/** + * Represents a value imported from another module + */ +export interface ImportedValue { + value: string; + sourceFile: string; + isLiteral: boolean; + + // Enhanced fields for template processing + templateSpans?: TemplateSpan[]; // For template expressions with spans + resolvedTokens?: TokenReference[]; // Pre-extracted tokens from this value +} + +/** + * Analyzes imports in a source file to extract string values + */ +export async function analyzeImports(sourceFile: SourceFile, project: Project): Promise> { + const importedValues = new Map(); + const filePath = sourceFile.getFilePath(); + + log(`Analyzing imports in ${filePath}`); + + // Get TypeScript's type checker + const typeChecker = project.getTypeChecker(); + + // Process all import declarations + for (const importDecl of sourceFile.getImportDeclarations()) { + try { + // Process the import declaration + await processImportDeclaration(importDecl, sourceFile, project, importedValues, typeChecker); + } catch (err) { + log(`Error processing import: ${importDecl.getModuleSpecifierValue()}`, err); + } + } + + return importedValues; +} + +/** + * Process a single import declaration + */ +async function processImportDeclaration( + importDecl: ImportDeclaration, + sourceFile: SourceFile, + project: Project, + importedValues: Map, + typeChecker: TypeChecker, +): Promise { + const moduleSpecifier = importDecl.getModuleSpecifierValue(); + const containingFilePath = sourceFile.getFilePath(); + + // Use our module resolver to get the imported file + const importedFile = getModuleSourceFile(project, moduleSpecifier, containingFilePath); + + if (!importedFile) { + log(`Could not resolve module: ${moduleSpecifier}`); + return; + } + + // Process named imports (import { x } from 'module') + processNamedImports(importDecl, importedFile, project, importedValues, typeChecker); + + // Process default import (import x from 'module') + processDefaultImport(importDecl, importedFile, project, importedValues, typeChecker); +} + +/** + * Process named imports using TypeScript's type checker to follow re-exports + */ +function processNamedImports( + importDecl: ImportDeclaration, + importedFile: SourceFile, + project: Project, + importedValues: Map, + typeChecker: TypeChecker, +): void { + for (const namedImport of importDecl.getNamedImports()) { + const importName = namedImport.getName(); + const alias = namedImport.getAliasNode()?.getText() || importName; + + // Find the export's true source using TypeScript's type checker + const exportInfo = findExportDeclaration(importedFile, importName, typeChecker); + + if (exportInfo) { + const { declaration, sourceFile: declarationFile } = exportInfo; + + // Extract the value from the declaration + const valueInfo = extractValueFromDeclaration(declaration, typeChecker); + + if (valueInfo) { + importedValues.set(alias, { + value: valueInfo.value, + sourceFile: declarationFile.getFilePath(), + isLiteral: valueInfo.isLiteral, + templateSpans: valueInfo.templateSpans, + }); + + log(`Added imported value: ${alias} = ${valueInfo.value} from 
${declarationFile.getFilePath()}`); + } + } + } +} + +/** + * Process default import using TypeScript's type checker + */ +function processDefaultImport( + importDecl: ImportDeclaration, + importedFile: SourceFile, + project: Project, + importedValues: Map, + typeChecker: TypeChecker, +): void { + const defaultImport = importDecl.getDefaultImport(); + if (!defaultImport) { + return; + } + + const importName = defaultImport.getText(); + + // Find the default export's true source + const exportInfo = findExportDeclaration(importedFile, 'default', typeChecker); + + if (exportInfo) { + const { declaration, sourceFile: declarationFile } = exportInfo; + + // Extract the value from the declaration + const valueInfo = extractValueFromDeclaration(declaration, typeChecker); + + if (valueInfo) { + importedValues.set(importName, { + value: valueInfo.value, + sourceFile: declarationFile.getFilePath(), + isLiteral: valueInfo.isLiteral, + templateSpans: valueInfo.templateSpans, + }); + + log(`Added default import: ${importName} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); + } + } +} + +/** + * Find an export's original declaration using TypeScript's type checker + */ +function findExportDeclaration( + sourceFile: SourceFile, + exportName: string, + typeChecker: TypeChecker, +): { declaration: Node; sourceFile: SourceFile } | undefined { + try { + // Get the source file's symbol (represents the module) + const sourceFileSymbol = typeChecker.getSymbolAtLocation(sourceFile); + if (!sourceFileSymbol) { + log(`No symbol found for source file ${sourceFile.getFilePath()}`); + return undefined; + } + + // Get all exports from this module + const exports = typeChecker.getExportsOfModule(sourceFileSymbol); + if (!exports || exports.length === 0) { + log(`No exports found in module ${sourceFile.getFilePath()}`); + return undefined; + } + + // Find the specific export we're looking for + const exportSymbol = exports.find((symbol: Symbol) => symbol.getName() === exportName); + if (!exportSymbol) { + log(`Export symbol '${exportName}' not found in ${sourceFile.getFilePath()}`); + return undefined; + } + + // If this is an alias (re-export), get the original symbol + let resolvedSymbol: Symbol = exportSymbol; + if (exportSymbol.isAlias()) { + // we're ok type casting here because we know the symbol is an alias from the previous check but TS won't pick up on it + resolvedSymbol = typeChecker.getAliasedSymbol(exportSymbol) as Symbol; + log(`Resolved alias to: ${resolvedSymbol.getName()}`); + } + + // Get the value declaration from the resolved symbol + const valueDeclaration = resolvedSymbol.getValueDeclaration(); + if (!valueDeclaration) { + log(`No value declaration found for ${exportName}`); + + // Fallback to any declaration if value declaration is not available + const declarations = resolvedSymbol.getDeclarations(); + if (!declarations || declarations.length === 0) { + log(`No declarations found for ${exportName}`); + return undefined; + } + + const declaration = declarations[0]; + const declarationSourceFile = declaration.getSourceFile(); + + return { + declaration, + sourceFile: declarationSourceFile, + }; + } + + const declarationSourceFile = valueDeclaration.getSourceFile(); + + log( + `Found declaration for '${exportName}': ${valueDeclaration.getKindName()} in ${declarationSourceFile.getFilePath()}`, + ); + return { + declaration: valueDeclaration, + sourceFile: declarationSourceFile, + }; + } catch (err) { + log(`Error finding export declaration for ${exportName}:`, err); + return undefined; 
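+    // Illustrative sketch of the re-export chain this resolution is meant to follow
+    // (hypothetical file and variable names, not taken from this patch):
+    //   // constants.ts
+    //   export const fieldColor = `var(--field-color, ${tokens.colorNeutralForeground1})`;
+    //   // index.ts
+    //   export { fieldColor } from './constants';
+    //   // MyComponent.styles.ts
+    //   import { fieldColor } from './index';
+    // The 'fieldColor' export symbol on index.ts is an alias, so getAliasedSymbol() above
+    // resolves it to the original variable declaration in constants.ts, whose initializer
+    // is what extractValueFromDeclaration() then reads for token references.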
+ } +} + +/** + * Extract string value from a declaration node + */ +function extractValueFromDeclaration( + declaration: Node, + typeChecker: TypeChecker, +): { value: string; isLiteral: boolean; templateSpans?: TemplateSpan[] } | undefined { + // Handle variable declarations + if (Node.isVariableDeclaration(declaration)) { + const initializer = declaration.getInitializer(); + return extractValueFromExpression(initializer, typeChecker); + } + // Handle export assignments (export default "value") + if (Node.isExportAssignment(declaration)) { + const expression = declaration.getExpression(); + return extractValueFromExpression(expression, typeChecker); + } + + // Handle named exports (export { x }) + if (Node.isExportSpecifier(declaration)) { + // Find the local symbol this specifier refers to + const name = declaration.getNameNode().getText(); + const sourceFile = declaration.getSourceFile(); + + // Find the local declaration with this name + for (const varDecl of sourceFile.getDescendantsOfKind(SyntaxKind.VariableDeclaration)) { + if (varDecl.getName() === name) { + const initializer = varDecl.getInitializer(); + return extractValueFromExpression(initializer, typeChecker); + } + } + } + + return undefined; +} + +/** + * Extract value from an expression node with enhanced template literal handling + */ +function extractValueFromExpression( + expression: Node | undefined, + typeChecker: TypeChecker, +): + | { + value: string; + isLiteral: boolean; + templateSpans?: TemplateSpan[]; + } + | undefined { + if (!expression) { + return undefined; + } + + if (Node.isStringLiteral(expression)) { + return { + value: expression.getLiteralValue(), + isLiteral: true, + }; + } else if (Node.isTemplateExpression(expression)) { + // Process the template head and spans fully + const head = expression.getHead().getLiteralText(); + const spans = expression.getTemplateSpans(); + + let fullValue = head; + const templateSpans: TemplateSpan[] = []; + + // Add head as a non-token span if it's not empty + if (head) { + templateSpans.push({ + text: head, + isToken: false, + isReference: false, + }); + } + + // Process each span in the template expression + for (const span of spans) { + const spanExpr = span.getExpression(); + const spanText = spanExpr.getText(); + const literal = span.getLiteral().getLiteralText(); + + // Handle different types of expressions in template spans + if (Node.isPropertyAccessExpression(spanExpr) && isTokenReference(spanExpr)) { + // Direct token reference in template span + templateSpans.push({ + text: spanText, + isToken: true, + isReference: false, + }); + fullValue += spanText; + } else if (Node.isIdentifier(spanExpr)) { + // Potential reference to another variable + templateSpans.push({ + text: spanText, + isToken: false, + isReference: true, + referenceName: spanText, + }); + fullValue += spanText; + } else { + // Other expression types - try to resolve recursively + const resolvedExpr = extractValueFromExpression(spanExpr, typeChecker); + if (resolvedExpr) { + if (resolvedExpr.templateSpans) { + // If it has its own spans, include them + templateSpans.push(...resolvedExpr.templateSpans); + } else { + // Otherwise add the value + templateSpans.push({ + text: resolvedExpr.value, + isToken: false, + isReference: false, + }); + } + fullValue += resolvedExpr.value; + } else { + // Fallback to the raw text if we can't resolve + templateSpans.push({ + text: spanText, + isToken: false, + isReference: false, + }); + fullValue += spanText; + } + } + + // Add the literal part that follows 
the expression + if (literal) { + templateSpans.push({ + text: literal, + isToken: false, + isReference: false, + }); + fullValue += literal; + } + } + + return { + value: fullValue, + isLiteral: true, + templateSpans, + }; + } else if (Node.isIdentifier(expression)) { + // Try to resolve the identifier to its value + const symbol = expression.getSymbol(); + if (!symbol) { + return { + value: expression.getText(), + isLiteral: false, + }; + } + + // Get the declaration of this identifier + const decl = symbol.getValueDeclaration() || symbol.getDeclarations()?.[0]; + if (!decl) { + return { + value: expression.getText(), + isLiteral: false, + }; + } + + // If it's a variable declaration, get its initializer + if (Node.isVariableDeclaration(decl)) { + const initializer = decl.getInitializer(); + if (initializer) { + // Recursively resolve the initializer + return extractValueFromExpression(initializer, typeChecker); + } + } + + return { + value: expression.getText(), + isLiteral: false, + }; + } else if (Node.isPropertyAccessExpression(expression)) { + // Handle tokens.xyz or other property access + return { + value: expression.getText(), + isLiteral: false, + }; + } else if (Node.isNoSubstitutionTemplateLiteral(expression)) { + return { + value: expression.getLiteralValue(), + isLiteral: true, + }; + } + + // Default case for unhandled expression types + return undefined; +} + +/** + * Process string tokens in imported values + */ +export function processImportedStringTokens( + importedValues: Map, + propertyName: string, + value: string, + path: string[] = [], + tokenRegex: RegExp = TOKEN_REGEX, +): TokenReference[] { + const tokens: TokenReference[] = []; + + // Check if the value is an imported value reference + if (importedValues.has(value)) { + const importedValue = importedValues.get(value)!; + + // If we've already pre-resolved tokens for this value, use them + if (importedValue.resolvedTokens) { + return importedValue.resolvedTokens.map(token => ({ + ...token, + property: propertyName, // Update property name for current context + path: path, // Update path for current context + })); + } + + if (importedValue.isLiteral) { + if (importedValue.templateSpans) { + // Process template spans specially + for (const span of importedValue.templateSpans) { + if (span.isToken) { + // Direct token reference in span + tokens.push({ + property: propertyName, + token: span.text, + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); + } else if (span.isReference && span.referenceName && importedValues.has(span.referenceName)) { + // Reference to another imported value - process recursively + const spanTokens = processImportedStringTokens( + importedValues, + propertyName, + span.referenceName, + path, + tokenRegex, + ); + tokens.push(...spanTokens); + } else if (span.text.includes('var(')) { + // Check for CSS variables in the span text + const cssVarTokens = extractTokensFromCssVars(span.text, propertyName, path, tokenRegex); + cssVarTokens.forEach(token => { + tokens.push({ + ...token, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); + }); + } else { + // Check for direct token matches in non-reference spans + const matches = extractTokensFromText(span.text); + if (matches.length > 0) { + matches.forEach(match => { + tokens.push({ + property: propertyName, + token: match, + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); + }); + } + } + } + } else { + // Standard processing for literals without spans + // 
First, check for direct token references + const matches = extractTokensFromText(importedValue.value); + if (matches.length > 0) { + matches.forEach(match => { + tokens.push({ + property: propertyName, + token: match, + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); + }); + } else if (importedValue.value.includes('var(')) { + // Then check for CSS variable patterns + const cssVarTokens = extractTokensFromCssVars(importedValue.value, propertyName, path, tokenRegex); + cssVarTokens.forEach(token => { + tokens.push({ + ...token, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); + }); + } + } + } else { + // Non-literal values (like property access expressions) + if (isTokenReference(importedValue.value)) { + tokens.push({ + property: propertyName, + token: importedValue.value, + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); + } else { + // Check for any token references in the value + const matches = extractTokensFromText(importedValue.value); + if (matches.length > 0) { + matches.forEach(match => { + tokens.push({ + property: propertyName, + token: match, + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); + }); + } + } + } + + // Cache the resolved tokens for future use + importedValue.resolvedTokens = tokens.map(token => ({ ...token })); + } + + return tokens; +} diff --git a/packages/token-analyzer/src/index.ts b/packages/token-analyzer/src/index.ts index cb0ff5c3b..c67bde652 100644 --- a/packages/token-analyzer/src/index.ts +++ b/packages/token-analyzer/src/index.ts @@ -1 +1,130 @@ -export {}; +/* eslint-disable no-console */ +import { Project } from 'ts-morph'; +import { promises as fs } from 'fs'; +import { relative } from 'path'; +import { format } from 'prettier'; +import { findStyleFiles } from './fileOperations.js'; +import { analyzeFile } from './astAnalyzer.js'; +import { AnalysisResults, FileAnalysis } from './types.js'; +import { configure, log, error, measureAsync } from './debugUtils.js'; + +async function analyzeProjectStyles( + rootDir: string, + outputFile?: string, + options: { debug?: boolean; perf?: boolean } = {}, +): Promise { + configure({ + debug: options.debug || false, + perf: options.perf || false, + }); + + log(`Starting analysis of ${rootDir}`); + const results: AnalysisResults = {}; + + try { + const styleFiles = await measureAsync('find style files', () => findStyleFiles(rootDir)); + console.log(`Found ${styleFiles.length} style files to analyze`); + + const project = new Project({ + skipAddingFilesFromTsConfig: true, + skipFileDependencyResolution: false, + }); + + for (const file of styleFiles) { + const relativePath = relative(rootDir, file); + console.log(`Analyzing ${relativePath}...`); + + try { + const analysis = await analyzeFile(file, project); + if (Object.keys(analysis.styles).length > 0) { + results[relativePath] = { + styles: analysis.styles, + metadata: analysis.metadata, + }; + } + } catch (err) { + error(`Error analyzing ${relativePath}:`, err); + } + } + + if (outputFile) { + await measureAsync('write output file', async () => { + const formatted = format(JSON.stringify(sortObjectByKeys(results), null, 2), { + parser: 'json', + printWidth: 120, + tabWidth: 2, + singleQuote: true, + trailingComma: 'all', + arrowParens: 'avoid', + }); + await fs.writeFile(outputFile, formatted, 'utf8'); + console.log(`Analysis written to ${outputFile}`); + }); + } + + return results; + } catch (err) { + error('Error during analysis:', 
err); + throw err; + } +} + +/** + * Sorts an object by its keys alphabetically + * + * @param obj Object to sort + * @returns New object with the same properties, sorted by keys + */ +function sortObjectByKeys(obj: Record): Record { + return Object.keys(obj) + .sort() + .reduce((sorted: Record, key: string) => { + sorted[key] = obj[key]; + return sorted; + }, {}); +} + +function countTokens(analysis: FileAnalysis): number { + let count = 0; + Object.values(analysis.styles).forEach(_value => { + Object.values(_value).forEach(value => { + count += value.tokens.length; + if (value.nested) { + Object.values(value.nested).forEach(nestedValue => { + count += nestedValue.tokens.length; + }); + } + }); + }); + return count; +} + +// CLI execution +const isRunningDirectly = process.argv[1].endsWith('index.ts'); +if (isRunningDirectly) { + const rootDir = process.argv[2] || '../..'; + const outputFile = process.argv[3] || './output.json'; + const debug = process.argv.includes('--debug'); + const perf = process.argv.includes('--perf'); + + console.log(`Starting analysis of ${rootDir}`); + analyzeProjectStyles(rootDir, outputFile, { debug, perf }) + .then(results => { + const totalFiles = Object.keys(results).length; + let totalTokens = 0; + + Object.values(results).forEach(fileAnalysis => { + totalTokens += countTokens(fileAnalysis); + }); + + console.log('\nAnalysis complete!'); + console.log(`Processed ${totalFiles} files containing styles`); + console.log(`Found ${totalTokens} token references`); + }) + .catch(err => { + console.error('Analysis failed:', err); + process.exit(1); + }); +} + +export { analyzeProjectStyles }; diff --git a/packages/token-analyzer/src/moduleResolver.ts b/packages/token-analyzer/src/moduleResolver.ts new file mode 100644 index 000000000..c41656a6b --- /dev/null +++ b/packages/token-analyzer/src/moduleResolver.ts @@ -0,0 +1,204 @@ +// moduleResolver.ts +import * as ts from 'typescript'; +import * as path from 'path'; +import * as fs from 'fs'; +import { Project, SourceFile } from 'ts-morph'; +import { log } from './debugUtils.js'; + +// Create a wrapper around TypeScript APIs for easier mocking +export const tsUtils = { + resolveModuleName: ( + moduleName: string, + containingFile: string, + compilerOptions: ts.CompilerOptions, + host: ts.ModuleResolutionHost, + ) => ts.resolveModuleName(moduleName, containingFile, compilerOptions, host), + + getFileSize: (filePath: string) => ts.sys.getFileSize?.(filePath), + + fileExists: (filePath: string) => ts.sys.fileExists(filePath), +}; + +// Cache for resolved module paths +export const modulePathCache = new Map(); + +// Cache for resolved source files +export const resolvedFilesCache = new Map(); + +/** + * Creates a cache key for module resolution + */ +function createCacheKey(moduleSpecifier: string, containingFile: string): string { + return `${containingFile}:${moduleSpecifier}`; +} + +/** + * Verifies a resolved file path actually exists + */ +function verifyFileExists(filePath: string | undefined | null): boolean { + if (!filePath) { + return false; + } + + try { + // Use TypeScript's file system abstraction for testing compatibility + return tsUtils.fileExists(filePath); + } catch (e) { + // If that fails, try Node's fs as fallback + try { + return fs.existsSync(filePath); + } catch (nestedE) { + return false; + } + } +} + +/** + * Resolves a module specifier to an absolute file path using TypeScript's resolution + * + * @param project TypeScript project + * @param moduleSpecifier The module to resolve (e.g., './utils', 
'react') + * @param containingFile The file containing the import + * @returns The absolute file path or null if it can't be resolved + */ +export function resolveModulePath(project: Project, moduleSpecifier: string, containingFile: string): string | null { + const cacheKey = createCacheKey(moduleSpecifier, containingFile); + + // Check cache first + if (modulePathCache.has(cacheKey)) { + return modulePathCache.get(cacheKey)!; + } + + // For relative paths, try a simple path resolution first + if (moduleSpecifier.startsWith('.')) { + try { + const basePath = path.dirname(containingFile); + const extensions = ['.ts', '.tsx', '.js', '.jsx', '.d.ts']; + + // Check if the module specifier already has a valid extension + const hasExtension = extensions.some(ext => moduleSpecifier.endsWith(ext)); + + // 1. If it has an extension, try the exact path first + if (hasExtension) { + const exactPath = path.resolve(basePath, moduleSpecifier); + if (verifyFileExists(exactPath)) { + modulePathCache.set(cacheKey, exactPath); + return exactPath; + } + } + + // 2. Try with added extensions (for paths without extension or if exact path failed) + if (!hasExtension) { + for (const ext of extensions) { + const candidatePath = path.resolve(basePath, moduleSpecifier + ext); + if (verifyFileExists(candidatePath)) { + modulePathCache.set(cacheKey, candidatePath); + return candidatePath; + } + } + } + + // 3. Try as directory with index file + const dirPath = hasExtension + ? path.resolve( + basePath, + path.dirname(moduleSpecifier), + path.basename(moduleSpecifier, path.extname(moduleSpecifier)), + ) + : path.resolve(basePath, moduleSpecifier); + + for (const ext of extensions) { + const candidatePath = path.resolve(dirPath, 'index' + ext); + if (verifyFileExists(candidatePath)) { + modulePathCache.set(cacheKey, candidatePath); + return candidatePath; + } + } + } catch (e) { + // Fall through to TypeScript's module resolution + } + } + + // Use TypeScript's module resolution API + const result = tsUtils.resolveModuleName( + moduleSpecifier, + containingFile, + project.getCompilerOptions() as ts.CompilerOptions, + ts.sys, + ); + + // Validate and cache the result + if (result.resolvedModule) { + const resolvedPath = result.resolvedModule.resolvedFileName; + + // Verify the file actually exists + if (verifyFileExists(resolvedPath)) { + modulePathCache.set(cacheKey, resolvedPath); + return resolvedPath; + } + } + + // Cache negative result + log(`Could not resolve module: ${moduleSpecifier} from ${containingFile}`); + modulePathCache.set(cacheKey, null); + return null; +} + +/** + * Gets a source file for a module specifier, resolving and adding it if needed + * + * @param project TypeScript project + * @param moduleSpecifier The module to resolve (e.g., './utils', 'react') + * @param containingFile The file containing the import + * @returns The resolved source file or null if it can't be resolved + */ +export function getModuleSourceFile( + project: Project, + moduleSpecifier: string, + containingFile: string, +): SourceFile | null { + log(`Resolving module: ${moduleSpecifier} from ${containingFile}`); + + // Step 1: Try to resolve the module to a file path + const resolvedPath = resolveModulePath(project, moduleSpecifier, containingFile); + if (!resolvedPath) { + log(`Could not resolve module: ${moduleSpecifier}`); + return null; + } + + // Step 2: Check if we already have this file + if (resolvedFilesCache.has(resolvedPath)) { + return resolvedFilesCache.get(resolvedPath)!; + } + + // Step 3: Get or add the file 
to the project + try { + // First try to get file if it's already in the project + let sourceFile = project.getSourceFile(resolvedPath); + + // If not found, add it + if (!sourceFile) { + sourceFile = project.addSourceFileAtPath(resolvedPath); + log(`Added source file: ${resolvedPath}`); + } + + // Cache the result + resolvedFilesCache.set(resolvedPath, sourceFile); + return sourceFile; + } catch (error) { + log(`Error adding source file: ${resolvedPath}`, error); + return null; + } +} + +/** + * Clears the module resolution caches + * Useful for testing or when analyzing multiple projects + */ +export function clearModuleCache(): void { + modulePathCache.clear(); + resolvedFilesCache.clear(); +} + +// Export for testing +export { verifyFileExists }; diff --git a/packages/token-analyzer/src/tokenUtils.ts b/packages/token-analyzer/src/tokenUtils.ts new file mode 100644 index 000000000..12cf9b02e --- /dev/null +++ b/packages/token-analyzer/src/tokenUtils.ts @@ -0,0 +1,94 @@ +// tokenUtils.ts +import { Node, Symbol } from 'ts-morph'; +import { TOKEN_REGEX } from './types.js'; + +/** + * Centralizes token detection logic to make future changes easier + * @param textOrNode The text or Node to check for token references + * @returns true if the text/node contains a token reference + */ +export function isTokenReference(textOrNode: string | Node | Symbol): boolean { + // If we have a Node or Symbol, extract the text to check + let text: string; + + if (typeof textOrNode === 'string') { + text = textOrNode; + } else if (Node.isNode(textOrNode)) { + text = textOrNode.getText(); + } else if (textOrNode instanceof Symbol) { + // For symbols, we need to check the declarations + const declarations = textOrNode.getDeclarations(); + if (!declarations || declarations.length === 0) { + return false; + } + + // Get text from the first declaration + text = declarations[0].getText(); + } else { + return false; + } + // IMPORTANT: Reset lastIndex to prevent issues with the global flag + TOKEN_REGEX.lastIndex = 0; + const test = TOKEN_REGEX.test(text); + return test; +} + +/** + * Extracts all token references from a text string or Node + * @param textOrNode The text or Node to extract tokens from + * @returns Array of token reference strings + */ +export function extractTokensFromText(textOrNode: string | Node | Symbol): string[] { + // If we have a Node or Symbol, extract the text to check + let text: string; + + if (typeof textOrNode === 'string') { + text = textOrNode; + } else if (Node.isNode(textOrNode)) { + text = textOrNode.getText(); + } else if (textOrNode instanceof Symbol) { + // For symbols, we need to check the declarations + const declarations = textOrNode.getDeclarations(); + if (!declarations || declarations.length === 0) { + return []; + } + + // Get text from the first declaration + text = declarations[0].getText(); + } else { + return []; + } + + const matches = text.match(TOKEN_REGEX); + return matches || []; +} + +/** + * Maps shorthand function names to the CSS properties they affect + * @param functionName The name of the shorthand function (e.g., "borderColor" or "shorthands.borderColor") + * @returns Array of CSS property names affected by this shorthand + */ +export function getPropertiesForShorthand(functionName: string): string[] { + const shorthandMap: Record = { + // Border shorthands + borderColor: ['borderTopColor', 'borderRightColor', 'borderBottomColor', 'borderLeftColor'], + border: ['borderWidth', 'borderStyle', 'borderColor'], + borderRadius: ['borderTopLeftRadius', 
'borderTopRightRadius', 'borderBottomRightRadius', 'borderBottomLeftRadius'], + + // Padding/margin shorthands + padding: ['paddingTop', 'paddingRight', 'paddingBottom', 'paddingLeft'], + margin: ['marginTop', 'marginRight', 'marginBottom', 'marginLeft'], + + // Other common shorthands + flex: ['flexGrow', 'flexShrink', 'flexBasis'], + gap: ['rowGap', 'columnGap'], + overflow: ['overflowX', 'overflowY'], + gridArea: ['gridRowStart', 'gridColumnStart', 'gridRowEnd', 'gridColumnEnd'], + inset: ['top', 'right', 'bottom', 'left'], + }; + + // Extract base function name if it's a qualified name (e.g., shorthands.borderColor -> borderColor) + const baseName = functionName.includes('.') ? functionName.split('.').pop() : functionName; + + return baseName && shorthandMap[baseName!] ? shorthandMap[baseName!] : []; +} diff --git a/packages/token-analyzer/src/types.ts b/packages/token-analyzer/src/types.ts new file mode 100644 index 000000000..06af5b407 --- /dev/null +++ b/packages/token-analyzer/src/types.ts @@ -0,0 +1,54 @@ +// types.ts +export interface TokenReference { + property: string; + token: string; + path: string[]; + isVariableReference?: boolean; + sourceFile?: string; +} + +export interface StyleContent { + tokens: TokenReference[]; + nested?: StyleTokens; + isResetStyles?: boolean; + assignedVariables?: string[]; +} + +export interface StyleTokens { + [key: string]: StyleContent; +} + +export interface StyleAnalysis { + [key: string]: StyleTokens; +} + +export interface StyleCondition { + style: string; + condition?: string; +} + +export interface StyleMetadata { + styleConditions: { + [styleName: string]: { + isBase?: boolean; + conditions?: string[]; + slotName: string; + }; + }; +} + +export interface FileAnalysis { + styles: StyleAnalysis; + metadata: StyleMetadata; +} + +export interface AnalysisResults { + [filePath: string]: FileAnalysis; +} + +// Constants +export const TOKEN_REGEX = /tokens\.[a-zA-Z0-9.]+/g; +export const IGNORED_DIRS = ['node_modules', 'dist', 'build', '.git']; +export const VALID_EXTENSIONS = ['.ts', '.tsx', '.js', '.jsx', '.mjs']; + +export type TokenMap = Map; From 1309a0bfa1c01c813944e60e60b74d219f559853 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 2 Apr 2025 14:01:28 -0700 Subject: [PATCH 03/75] updating dev dep into root package.json --- package.json | 1 + packages/token-analyzer/package.json | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/package.json b/package.json index 2f932bc1c..0ea76ec47 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,7 @@ "@fluentui/react-migration-v8-v9": "^9.6.23", "@fluentui/react-shared-contexts": "^9.7.2", "@fluentui/scheme-utilities": "^8.3.58", + "@griffel/react": "^1.5.22", "@griffel/shadow-dom": "~0.2.0", "@nx/devkit": "20.8.1", "@nx/eslint": "20.8.1", diff --git a/packages/token-analyzer/package.json b/packages/token-analyzer/package.json index c6ac2f1af..105fb7581 100644 --- a/packages/token-analyzer/package.json +++ b/packages/token-analyzer/package.json @@ -8,9 +8,6 @@ "typescript": "5.7.3", "prettier": "2.8.8" }, - "devDependencies": { - "@griffel/react": "^1.5.22" - }, "scripts": { "analyze-tokens": "NODE_OPTIONS=\"--loader ts-node/esm\" ts-node-esm src/index.ts", "test": "jest", From 55821b2a583f1127299016773e93d1e02183736f Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 2 Apr 2025 14:01:35 -0700 Subject: [PATCH 04/75] updating readme --- packages/token-analyzer/README.md | 192 +++++++++++++++++++++++++++++- 1 file changed, 186 insertions(+), 6 deletions(-) diff 
--git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index ba0a84e14..6fcf0d787 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -1,11 +1,191 @@ -# token-analyzer +# Design Token Usage Analyzer -This library was generated with [Nx](https://nx.dev). +A static analysis tool that scans your project's style files to track and analyze design token usage. The analyzer helps identify where and how design tokens are being used across your codebase, making it easier to maintain consistency and track token adoption. -## Building +## TODO -Run `nx build token-analyzer` to build the library. +- we also need to ensure var analysis is done correctly after the refactor +- ~~**This is high pri now since we have components in source using this technique (see buttonstyles.styles.ts)** Handle very complex cases like `var(--optional-token, var(--semantic-token, ${some-other-var-with-a-string-or-fallback}))`. This other var might be in another package or file as well. Currently we won't handle this level of depth but we could do symbol extraction in the future if needed to resolve the chain fully. This will likely require changes in importAnalyzer.ts and structural changes in the data we return. On top of needing to find referenced symbols within an aliased template string literal, we might also then need to parse out var fallbacks within short hands. IE: `padding: 'var(--first, var(--second)) 10px` and ensure the ordering is correct.~~ +- ~~Format output with prettier when we save to ensure stage lint doesn't fail.~~ +- ~~make sure this works with shorthand spread~~ +- Look at the path info again. Do we ever need it? +- Convert token member within the analysis output to an array so we can hold multiple tokens. The order should be the order or priority. [0] being the highest pri with the last item in the array the least prioritized. +- Duplicate entries in useButtonStyles.styles.ts for useRootDisabledStyles.base.nested:hover.color - we might need to test case this +- ~~We've added the ability to analyze spreads but there's an issue where we find the tokens and call them out but they get nuked somewhere before we return them. Need to trace that and fix.~~ +- Add makeResetStyles specific tests in analyzer to ensure we process those correctly. +- ~~Button has some weird patterns in it where it uses makeResetStyles and then uses enums to pull in the styles, we might need to account for those as well.~~ +- ~~Some property assignments can also be function calls, we need to process this scenario~~ +- ~~`createCustomFocusIndicatorStyle` is a special function that is used throughout the library so we might be able to special case it~~ +- ~~if we have file imports we need to analyze those such as importing base styles~~ + ~~- Manage makeResetStyles (likely same as makeStyles)~~ +- ~~what if we have multiple `makeStyles` calls merged, are we handling that correctly or just nuking the conflicts in our output?~~ +- as we update the functionality, we should update our test cases to reflect the new functionality we support and ensure it works. +- ~~if we have functions we can't process (or other code for that matter), can we add that data into our report so we know to manually go deal with it?~~ +- ~~assignedSlots in output to track which slots classes are applied to~~ +- ~~Add variables full name to metadata (i.e. 
classNames.icon instead of just 'icon)~~ +- ~~Module importing~~ -## Running unit tests +## Features -Run `nx test token-analyzer` to execute the unit tests via [Jest](https://jestjs.io). +- Scans TypeScript/JavaScript style files for token usage +- Tracks both direct token references and variables that reference tokens +- Follows imports to resolve token references across files +- Generates detailed JSON reports of token usage +- Performance tracking and debugging capabilities +- Handles nested style objects and property assignments + +## Installation + +```bash +npm install --save-dev @your-org/token-analyzer +``` + +## Usage + +### Via CLI + +The analyzer can be run from the command line: + +```bash +npm run analyze-tokens -- [sourceDir] [outputFile] [flags] +``` + +#### Arguments: + +- `sourceDir`: Directory to analyze (default: `./src`) +- `outputFile`: Output JSON file path (default: `./token-analysis.json`) + +#### Flags: + +- `--debug`: Enable debug logging +- `--perf`: Enable performance metrics + +Examples: + +```bash +# Analyze src directory with default output +npm run analyze-tokens + +# Analyze specific directory with custom output +npm run analyze-tokens -- ./components ./analysis.json + +# Run with debug logging +npm run analyze-tokens -- --debug + +# Run with performance metrics +npm run analyze-tokens -- --perf + +# Run with both debug and performance tracking +npm run analyze-tokens -- --debug --perf +``` + +### Programmatic Usage + +```typescript +import { analyzeProjectStyles } from '@your-org/token-analyzer'; + +async function analyze() { + const results = await analyzeProjectStyles('./src', './analysis.json', { + debug: true, + perf: true, + }); + + console.log(`Analyzed ${Object.keys(results).length} files`); +} +``` + +## Configuration + +The analyzer identifies style files based on naming conventions. By default, it looks for: + +- Files containing `style` or `styles` in the name +- Files with extensions: `.ts`, `.tsx`, `.js`, `.jsx`, `.mjs` + +### Debug Configuration + +Debug and performance tracking can be configured via: + +1. CLI flags (as shown above) +2. Programmatic options when calling `analyzeProjectStyles` +3. Environment variables: + - `TOKEN_ANALYZER_DEBUG=true` + - `TOKEN_ANALYZER_PERF=true` + +## Output Format + +The analyzer generates a JSON file with the following structure: + +```typescript +{ + "path/to/file.ts": { + "styleName": { + "tokens": [ + { + "property": "color", + "token": "tokens.colors.primary", + "fromVariable": false // true if reference comes from a variable + } + ], + "nested": { + "hover": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colors.secondary", + "fromVariable": true, + "sourceFile": "path/to/variables.ts" // only present for variable references + } + ] + } + } + } + } +} +``` + +## Development + +### Project Structure + +``` +src/ + ├── index.ts # Main entry point + ├── astAnalyzer.ts # AST analysis logic + ├── fileOperations.ts # File handling utilities + ├── formatter.ts # Output formatting + ├── debugUtils.ts # Debug and performance utilities + └── types.ts # TypeScript type definitions +``` + +### Running Tests + +```bash +npm test +``` + +### Building + +```bash +npm run build +``` + +## Pending Improvements + +- [ ] Add more granular performance metrics +- [ ] Implement different levels of debug logging +- [ ] Add output format customization +- [ ] Add parallel processing options +- [ ] Add token pattern customization +- [ ] Add file pattern customization + +## Contributing + +1. 
Fork the repository +2. Create your feature branch (`git checkout -b feature/amazing-feature`) +3. Commit your changes (`git commit -m 'Add some amazing feature'`) +4. Push to the branch (`git push origin feature/amazing-feature`) +5. Open a Pull Request + +## License + +This project is licensed under the MIT License - see the LICENSE file for details. From 0a87658de420a058a3b119fa8d0944c22e07ce9a Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 2 Apr 2025 14:03:17 -0700 Subject: [PATCH 05/75] fixing gen issues --- nx.json | 5 ----- 1 file changed, 5 deletions(-) diff --git a/nx.json b/nx.json index 09a6ebd2d..8fe633f7d 100644 --- a/nx.json +++ b/nx.json @@ -38,11 +38,6 @@ "options": { "packageRoot": "dist/packages/{projectName}" } - }, - "@nx/js:swc": { - "cache": true, - "dependsOn": ["^build"], - "inputs": ["production", "^production"] } }, "namedInputs": { From 2a5b7c113211dbd38674554a35dda2b5780c29b7 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 2 Apr 2025 16:02:57 -0700 Subject: [PATCH 06/75] updating packages fixing lint issues updating readme --- package.json | 2 + packages/token-analyzer/README.md | 1 + packages/token-analyzer/package.json | 4 +- packages/token-analyzer/src/astAnalyzer.ts | 198 ++++++++++++------ packages/token-analyzer/src/debugUtils.ts | 6 +- packages/token-analyzer/src/fileOperations.ts | 17 +- packages/token-analyzer/src/index.ts | 38 ++-- packages/token-analyzer/src/moduleResolver.ts | 42 +++- yarn.lock | 29 ++- 9 files changed, 241 insertions(+), 96 deletions(-) diff --git a/package.json b/package.json index 0ea76ec47..c909d0920 100644 --- a/package.json +++ b/package.json @@ -71,6 +71,7 @@ "@testing-library/react": "16.1.0", "@types/jest": "29.5.14", "@types/node": "20.14.9", + "@types/prettier": "^2.6.2", "@types/react": "18.3.1", "@types/react-dom": "18.3.0", "@types/react-virtualized-auto-sizer": "^1.0.1", @@ -98,6 +99,7 @@ "storybook": "7.6.20", "stylelint": "^15.10.3", "syncpack": "^9.8.6", + "ts-morph": "24.0.0", "ts-node": "10.9.2", "tslib": "^2.3.0", "typescript": "5.7.3", diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index 6fcf0d787..687b4ccc3 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -24,6 +24,7 @@ A static analysis tool that scans your project's style files to track and analyz - ~~assignedSlots in output to track which slots classes are applied to~~ - ~~Add variables full name to metadata (i.e. classNames.icon instead of just 'icon)~~ - ~~Module importing~~ +- add config to point to custom prettier config for file output. 
## Features diff --git a/packages/token-analyzer/package.json b/packages/token-analyzer/package.json index 105fb7581..b4f2ee84e 100644 --- a/packages/token-analyzer/package.json +++ b/packages/token-analyzer/package.json @@ -4,9 +4,9 @@ "main": "./src/index.js", "types": "./src/index.d.ts", "dependencies": { - "ts-morph": "24.0.1", + "ts-morph": "^24.0.0", "typescript": "5.7.3", - "prettier": "2.8.8" + "prettier": "^2.6.2" }, "scripts": { "analyze-tokens": "NODE_OPTIONS=\"--loader ts-node/esm\" ts-node-esm src/index.ts", diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index 3b02ce229..97ba2b9e6 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -1,5 +1,10 @@ -/* eslint-disable no-console */ -import { Project, Node, SourceFile, PropertyAssignment, SpreadAssignment } from 'ts-morph'; +import { + Project, + Node, + SourceFile, + PropertyAssignment, + SpreadAssignment, +} from 'ts-morph'; import { TokenReference, StyleAnalysis, @@ -11,9 +16,17 @@ import { StyleTokens, } from './types.js'; import { log, measure, measureAsync } from './debugUtils.js'; -import { analyzeImports, processImportedStringTokens, ImportedValue } from './importAnalyzer.js'; +import { + analyzeImports, + processImportedStringTokens, + ImportedValue, +} from './importAnalyzer.js'; import { extractTokensFromCssVars } from './cssVarTokenExtractor.js'; -import { extractTokensFromText, getPropertiesForShorthand, isTokenReference } from './tokenUtils.js'; +import { + extractTokensFromText, + getPropertiesForShorthand, + isTokenReference, +} from './tokenUtils.js'; const makeResetStylesToken = 'resetStyles'; @@ -40,7 +53,7 @@ interface VariableMapping { function processStyleProperty( prop: PropertyAssignment | SpreadAssignment, importedValues: Map | undefined = undefined, - isResetStyles?: Boolean, + isResetStyles?: boolean ): TokenReference[] { const tokens: TokenReference[] = []; const parentName = Node.isPropertyAssignment(prop) ? 
prop.getName() : ''; @@ -61,13 +74,18 @@ function processStyleProperty( // Check for CSS var() syntax that might contain tokens if (text.includes('var(')) { - const cssVarTokens = extractTokensFromCssVars(text, path[path.length - 1] || parentName, path, TOKEN_REGEX); + const cssVarTokens = extractTokensFromCssVars( + text, + path[path.length - 1] || parentName, + path, + TOKEN_REGEX + ); tokens.push(...cssVarTokens); } else { // Check for direct token references const matches = extractTokensFromText(node); if (matches.length > 0) { - matches.forEach(match => { + matches.forEach((match) => { tokens.push({ property: path[path.length - 1] || parentName, token: match, @@ -82,7 +100,7 @@ function processStyleProperty( // First check if it matches the token regex directly const matches = extractTokensFromText(node); if (matches.length > 0) { - matches.forEach(match => { + matches.forEach((match) => { tokens.push({ property: path[path.length - 1] || parentName, token: match, @@ -98,7 +116,7 @@ function processStyleProperty( path[path.length - 1] || parentName, text, path, - TOKEN_REGEX, + TOKEN_REGEX ); tokens.push(...importTokens); } @@ -113,7 +131,7 @@ function processStyleProperty( }); } } else if (Node.isObjectLiteralExpression(node)) { - node.getProperties().forEach(childProp => { + node.getProperties().forEach((childProp) => { if (Node.isPropertyAssignment(childProp)) { const childName = childProp.getName(); processNode(childProp.getInitializer(), [...path, childName]); @@ -125,7 +143,10 @@ function processStyleProperty( } else if (Node.isSpreadAssignment(node)) { // Handle spread elements processNode(node.getExpression(), path); - } else if (Node.isCallExpression(node) && node.getExpression().getText() === 'createCustomFocusIndicatorStyle') { + } else if ( + Node.isCallExpression(node) && + node.getExpression().getText() === 'createCustomFocusIndicatorStyle' + ) { const focus = `:focus`; const focusWithin = `:focus-within`; let nestedModifier = focus; @@ -134,7 +155,7 @@ function processStyleProperty( const passedOptions = node.getArguments()[1]; if (passedOptions && Node.isObjectLiteralExpression(passedOptions)) { - passedOptions.getProperties().forEach(property => { + passedOptions.getProperties().forEach((property) => { if (Node.isPropertyAssignment(property)) { const optionName = property.getName(); if (optionName === 'selector') { @@ -150,10 +171,14 @@ function processStyleProperty( } if (passedTokens && Node.isObjectLiteralExpression(passedTokens)) { - passedTokens.getProperties().forEach(property => { + passedTokens.getProperties().forEach((property) => { if (Node.isPropertyAssignment(property)) { const childName = property.getName(); - processNode(property.getInitializer(), [...path, nestedModifier, childName]); + processNode(property.getInitializer(), [ + ...path, + nestedModifier, + childName, + ]); } }); } @@ -165,17 +190,21 @@ function processStyleProperty( if (affectedProperties.length > 0) { // Process each argument and apply it to all affected properties - node.getArguments().forEach(argument => { + node.getArguments().forEach((argument) => { processNodeForAffectedProperties(argument, affectedProperties, path); }); } else { // Generic handling of functions that are not whitelisted - node.getArguments().forEach(argument => { + node.getArguments().forEach((argument) => { if (Node.isObjectLiteralExpression(argument)) { - argument.getProperties().forEach(property => { + argument.getProperties().forEach((property) => { if (Node.isPropertyAssignment(property)) { const childName = 
property.getName(); - processNode(property.getInitializer(), [...path, functionName, childName]); + processNode(property.getInitializer(), [ + ...path, + functionName, + childName, + ]); } }); } @@ -187,7 +216,7 @@ function processStyleProperty( text, path[path.length - 1] || parentName, [...path, functionName], - TOKEN_REGEX, + TOKEN_REGEX ); tokens.push(...cssVarTokens); } @@ -198,14 +227,18 @@ function processStyleProperty( } // Helper function to process nodes for multiple affected properties - function processNodeForAffectedProperties(node: Node, properties: string[], basePath: string[]): void { + function processNodeForAffectedProperties( + node: Node, + properties: string[], + basePath: string[] + ): void { if (!node) { return; } // If this is a direct token reference if (Node.isPropertyAccessExpression(node) && isTokenReference(node)) { - properties.forEach(property => { + properties.forEach((property) => { tokens.push({ property, token: node.getText(), @@ -216,14 +249,18 @@ function processStyleProperty( } // If this is an identifier that might be a variable - if (Node.isIdentifier(node) && importedValues && importedValues.has(node.getText())) { - properties.forEach(property => { + if ( + Node.isIdentifier(node) && + importedValues && + importedValues.has(node.getText()) + ) { + properties.forEach((property) => { const importTokens = processImportedStringTokens( importedValues, property, node.getText(), basePath, - TOKEN_REGEX, + TOKEN_REGEX ); tokens.push(...importTokens); }); @@ -237,8 +274,8 @@ function processStyleProperty( // Check for tokens in the text const matches = extractTokensFromText(node); if (matches.length > 0) { - properties.forEach(property => { - matches.forEach(match => { + properties.forEach((property) => { + matches.forEach((match) => { tokens.push({ property, token: match, @@ -250,8 +287,13 @@ function processStyleProperty( // Check for CSS vars if (text.includes('var(')) { - properties.forEach(property => { - const cssVarTokens = extractTokensFromCssVars(text, property, basePath, TOKEN_REGEX); + properties.forEach((property) => { + const cssVarTokens = extractTokensFromCssVars( + text, + property, + basePath, + TOKEN_REGEX + ); tokens.push(...cssVarTokens); }); } @@ -281,8 +323,11 @@ function processStyleProperty( function analyzeMergeClasses(sourceFile: SourceFile): StyleMapping[] { const mappings: StyleMapping[] = []; - sourceFile.forEachDescendant(node => { - if (Node.isCallExpression(node) && node.getExpression().getText() === 'mergeClasses') { + sourceFile.forEachDescendant((node) => { + if ( + Node.isCallExpression(node) && + node.getExpression().getText() === 'mergeClasses' + ) { const parentNode = node.getParent(); let slotName = ''; if (Node.isBinaryExpression(parentNode)) { @@ -300,7 +345,7 @@ function analyzeMergeClasses(sourceFile: SourceFile): StyleMapping[] { * coded it for now but this could be improved. 
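   * For example (illustrative, hypothetical names): given
   *   const useStyles = makeStyles({ root: { color: tokens.colorNeutralForeground1 } });
   * and, inside the component, const styles = useStyles();, the `styles.root` argument we see
   * in mergeClasses(...) is assumed to come from that useStyles map; we currently rely on this
   * naming convention rather than resolving the assignment chain.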
*/ - node.getArguments().forEach(arg => { + node.getArguments().forEach((arg) => { // Handle direct style references if (Node.isPropertyAccessExpression(arg)) { mapping.baseStyles.push(arg.getText()); @@ -348,13 +393,13 @@ function analyzeMergeClasses(sourceFile: SourceFile): StyleMapping[] { */ function createStyleContent(tokens: TokenReference[]): StyleContent { const content: StyleContent = { - tokens: tokens.filter(t => { + tokens: tokens.filter((t) => { return t.path.length === 1; }), }; // Nested structures have paths longer than 1 - const nestedTokens = tokens.filter(t => t.path.length > 1); + const nestedTokens = tokens.filter((t) => t.path.length > 1); if (nestedTokens.length > 0) { content.nested = nestedTokens.reduce((acc, token) => { const nestedKey = token.path[0]; @@ -383,18 +428,22 @@ function createMetadata(styleMappings: StyleMapping[]): StyleMetadata { styleConditions: {}, }; - styleMappings.forEach(mapping => { - mapping.baseStyles.forEach(style => { + styleMappings.forEach((mapping) => { + mapping.baseStyles.forEach((style) => { if (metadata.styleConditions[style]) { metadata.styleConditions[style].isBase = true; } else { - metadata.styleConditions[style] = { isBase: true, slotName: mapping.slotName || '' }; + metadata.styleConditions[style] = { + isBase: true, + slotName: mapping.slotName || '', + }; } }); mapping.conditionalStyles.forEach(({ style, condition }) => { if (metadata.styleConditions[style]) { - metadata.styleConditions[style].conditions = metadata.styleConditions[style].conditions || []; + metadata.styleConditions[style].conditions = + metadata.styleConditions[style].conditions || []; if (condition) { metadata.styleConditions[style].conditions!.push(condition); } @@ -415,17 +464,23 @@ function createMetadata(styleMappings: StyleMapping[]): StyleMetadata { */ async function analyzeMakeStyles( sourceFile: SourceFile, - importedValues: Map | undefined = undefined, + importedValues: Map | undefined = undefined ): Promise { const analysis: StyleAnalysis = {}; - sourceFile.forEachDescendant(node => { - if (Node.isCallExpression(node) && node.getExpression().getText() === 'makeStyles') { + sourceFile.forEachDescendant((node) => { + if ( + Node.isCallExpression(node) && + node.getExpression().getText() === 'makeStyles' + ) { const stylesArg = node.getArguments()[0]; const parentNode = node.getParent(); - if (Node.isObjectLiteralExpression(stylesArg) && Node.isVariableDeclaration(parentNode)) { + if ( + Node.isObjectLiteralExpression(stylesArg) && + Node.isVariableDeclaration(parentNode) + ) { // Process the styles object - stylesArg.getProperties().forEach(prop => { + stylesArg.getProperties().forEach((prop) => { if (Node.isPropertyAssignment(prop)) { const styleName = prop.getName(); const tokens = processStyleProperty(prop, importedValues); @@ -439,7 +494,10 @@ async function analyzeMakeStyles( } }); } - } else if (Node.isCallExpression(node) && node.getExpression().getText() === 'makeResetStyles') { + } else if ( + Node.isCallExpression(node) && + node.getExpression().getText() === 'makeResetStyles' + ) { // Similar to above, but the styles are stored under the assigned function name instead of local variable const stylesArg = node.getArguments()[0]; const parentNode = node.getParent(); @@ -456,14 +514,14 @@ async function analyzeMakeStyles( }; if (Node.isObjectLiteralExpression(stylesArg)) { // Process the styles object - stylesArg.getProperties().forEach(prop => { + stylesArg.getProperties().forEach((prop) => { if (Node.isPropertyAssignment(prop)) { const 
tokens = processStyleProperty(prop, importedValues, true); if (tokens.length) { const styleContent = createStyleContent(tokens); - analysis[functionName][makeResetStylesToken].tokens = analysis[functionName][ - makeResetStylesToken - ].tokens.concat(styleContent.tokens); + analysis[functionName][makeResetStylesToken].tokens = analysis[ + functionName + ][makeResetStylesToken].tokens.concat(styleContent.tokens); analysis[functionName][makeResetStylesToken].nested = { ...analysis[functionName][makeResetStylesToken].nested, ...styleContent.nested, @@ -479,11 +537,14 @@ async function analyzeMakeStyles( const variables: VariableMapping[] = []; const styleFunctionNames: string[] = Object.keys(analysis); - sourceFile.forEachDescendant(node => { + sourceFile.forEachDescendant((node) => { // We do a second parse to link known style functions (i.e. makeResetStyles assigned function variable names). // This is necessary to handle cases where we're using a variable directly in mergeClasses to link styles. - if (Node.isCallExpression(node) && styleFunctionNames.includes(node.getExpression().getText())) { + if ( + Node.isCallExpression(node) && + styleFunctionNames.includes(node.getExpression().getText()) + ) { const parentNode = node.getParent(); const functionName = node.getExpression().getText(); if (Node.isVariableDeclaration(parentNode)) { @@ -498,12 +559,17 @@ async function analyzeMakeStyles( }); // Store our makeResetStyles assigned variables in the analysis to link later - variables.forEach(variable => { - Object.keys(analysis[variable.functionName]).forEach(styleName => { - if (analysis[variable.functionName][styleName].assignedVariables === undefined) { + variables.forEach((variable) => { + Object.keys(analysis[variable.functionName]).forEach((styleName) => { + if ( + analysis[variable.functionName][styleName].assignedVariables === + undefined + ) { analysis[variable.functionName][styleName].assignedVariables = []; } - analysis[variable.functionName][styleName].assignedVariables?.push(variable.variableName); + analysis[variable.functionName][styleName].assignedVariables?.push( + variable.variableName + ); }); }); @@ -513,21 +579,29 @@ async function analyzeMakeStyles( /** * Combines mergeClasses and makeStyles analysis, with import resolution */ -async function analyzeFile(filePath: string, project: Project): Promise { +async function analyzeFile( + filePath: string, + project: Project +): Promise { log(`Analyzing ${filePath}`); const sourceFile = project.addSourceFileAtPath(filePath); // First analyze imports to find imported string values log('Analyzing imports to find imported token values'); - const importedValues = await measureAsync('analyze imports', () => analyzeImports(sourceFile, project)); + const importedValues = await measureAsync('analyze imports', () => + analyzeImports(sourceFile, project) + ); // Second pass: Analyze mergeClasses - const styleMappings = measure('analyze mergeClasses', () => analyzeMergeClasses(sourceFile)); + const styleMappings = measure('analyze mergeClasses', () => + analyzeMergeClasses(sourceFile) + ); // Third pass: Analyze makeStyles with imported values - const styleAnalysis = await measureAsync('analyze makeStyles', () => - analyzeMakeStyles(sourceFile, importedValues), + const styleAnalysis = await measureAsync( + 'analyze makeStyles', + () => analyzeMakeStyles(sourceFile, importedValues) ); // Create enhanced analysis with separated styles and metadata @@ -537,5 +611,11 @@ async function analyzeFile(filePath: string, project: Project): Promise { 
console.error(`${prefix}${message}`, errorArg); }; -export const measureAsync = async (name: string, fn: () => Promise): Promise => { +export const measureAsync = async ( + name: string, + fn: () => Promise +): Promise => { if (!config.perf) { return fn(); } diff --git a/packages/token-analyzer/src/fileOperations.ts b/packages/token-analyzer/src/fileOperations.ts index 8d4c31ce3..97a70a528 100644 --- a/packages/token-analyzer/src/fileOperations.ts +++ b/packages/token-analyzer/src/fileOperations.ts @@ -13,14 +13,14 @@ export async function findStyleFiles(dir: string): Promise { async function scan(directory: string): Promise { const entries = await fs.readdir(directory, { withFileTypes: true }); - const scanPromises = entries.map(async entry => { + const scanPromises = entries.map(async (entry) => { const fullPath = join(directory, entry.name); if (entry.isDirectory() && !IGNORED_DIRS.includes(entry.name)) { await scan(fullPath); } else if ( (entry.name.includes('style') || entry.name.includes('styles')) && - VALID_EXTENSIONS.some(ext => entry.name.endsWith(ext)) + VALID_EXTENSIONS.some((ext) => entry.name.endsWith(ext)) ) { styleFiles.push(fullPath); } @@ -39,7 +39,10 @@ export async function findStyleFiles(dir: string): Promise { * @param currentFilePath The path of the file containing the import * @returns Resolved absolute path or null if not found */ -export async function resolveImportPath(importPath: string, currentFilePath: string): Promise { +export async function resolveImportPath( + importPath: string, + currentFilePath: string +): Promise { if (!importPath.startsWith('.')) { return null; } @@ -53,7 +56,9 @@ export async function resolveImportPath(importPath: string, currentFilePath: str if (stats.isFile()) { return absolutePath; } - } catch {} // Ignore errors and try extensions + } catch { + // Ignore errors and try extensions + } // Try with extensions for (const ext of VALID_EXTENSIONS) { @@ -63,7 +68,9 @@ export async function resolveImportPath(importPath: string, currentFilePath: str if (stats.isFile()) { return pathWithExt; } - } catch {} // Ignore errors and continue trying + } catch { + // Ignore errors and continue trying + } } return null; diff --git a/packages/token-analyzer/src/index.ts b/packages/token-analyzer/src/index.ts index c67bde652..6f527e2b1 100644 --- a/packages/token-analyzer/src/index.ts +++ b/packages/token-analyzer/src/index.ts @@ -1,4 +1,3 @@ -/* eslint-disable no-console */ import { Project } from 'ts-morph'; import { promises as fs } from 'fs'; import { relative } from 'path'; @@ -11,7 +10,7 @@ import { configure, log, error, measureAsync } from './debugUtils.js'; async function analyzeProjectStyles( rootDir: string, outputFile?: string, - options: { debug?: boolean; perf?: boolean } = {}, + options: { debug?: boolean; perf?: boolean } = {} ): Promise { configure({ debug: options.debug || false, @@ -22,7 +21,9 @@ async function analyzeProjectStyles( const results: AnalysisResults = {}; try { - const styleFiles = await measureAsync('find style files', () => findStyleFiles(rootDir)); + const styleFiles = await measureAsync('find style files', () => + findStyleFiles(rootDir) + ); console.log(`Found ${styleFiles.length} style files to analyze`); const project = new Project({ @@ -49,14 +50,17 @@ async function analyzeProjectStyles( if (outputFile) { await measureAsync('write output file', async () => { - const formatted = format(JSON.stringify(sortObjectByKeys(results), null, 2), { - parser: 'json', - printWidth: 120, - tabWidth: 2, - singleQuote: true, 
- trailingComma: 'all', - arrowParens: 'avoid', - }); + const formatted = format( + JSON.stringify(sortObjectByKeys(results), null, 2), + { + parser: 'json', + printWidth: 120, + tabWidth: 2, + singleQuote: true, + trailingComma: 'all', + arrowParens: 'avoid', + } + ); await fs.writeFile(outputFile, formatted, 'utf8'); console.log(`Analysis written to ${outputFile}`); }); @@ -86,11 +90,11 @@ function sortObjectByKeys(obj: Record): Record { function countTokens(analysis: FileAnalysis): number { let count = 0; - Object.values(analysis.styles).forEach(_value => { - Object.values(_value).forEach(value => { + Object.values(analysis.styles).forEach((_value) => { + Object.values(_value).forEach((value) => { count += value.tokens.length; if (value.nested) { - Object.values(value.nested).forEach(nestedValue => { + Object.values(value.nested).forEach((nestedValue) => { count += nestedValue.tokens.length; }); } @@ -109,11 +113,11 @@ if (isRunningDirectly) { console.log(`Starting analysis of ${rootDir}`); analyzeProjectStyles(rootDir, outputFile, { debug, perf }) - .then(results => { + .then((results) => { const totalFiles = Object.keys(results).length; let totalTokens = 0; - Object.values(results).forEach(fileAnalysis => { + Object.values(results).forEach((fileAnalysis) => { totalTokens += countTokens(fileAnalysis); }); @@ -121,7 +125,7 @@ if (isRunningDirectly) { console.log(`Processed ${totalFiles} files containing styles`); console.log(`Found ${totalTokens} token references`); }) - .catch(err => { + .catch((err) => { console.error('Analysis failed:', err); process.exit(1); }); diff --git a/packages/token-analyzer/src/moduleResolver.ts b/packages/token-analyzer/src/moduleResolver.ts index c41656a6b..7d3b0f3c3 100644 --- a/packages/token-analyzer/src/moduleResolver.ts +++ b/packages/token-analyzer/src/moduleResolver.ts @@ -11,7 +11,7 @@ export const tsUtils = { moduleName: string, containingFile: string, compilerOptions: ts.CompilerOptions, - host: ts.ModuleResolutionHost, + host: ts.ModuleResolutionHost ) => ts.resolveModuleName(moduleName, containingFile, compilerOptions, host), getFileSize: (filePath: string) => ts.sys.getFileSize?.(filePath), @@ -28,7 +28,10 @@ export const resolvedFilesCache = new Map(); /** * Creates a cache key for module resolution */ -function createCacheKey(moduleSpecifier: string, containingFile: string): string { +function createCacheKey( + moduleSpecifier: string, + containingFile: string +): string { return `${containingFile}:${moduleSpecifier}`; } @@ -45,9 +48,11 @@ function verifyFileExists(filePath: string | undefined | null): boolean { return tsUtils.fileExists(filePath); } catch (e) { // If that fails, try Node's fs as fallback + log(`Error checking file existence with TypeScript: ${filePath}`, e); try { return fs.existsSync(filePath); } catch (nestedE) { + log(`Error checking file existence: ${filePath}`, nestedE); return false; } } @@ -61,12 +66,19 @@ function verifyFileExists(filePath: string | undefined | null): boolean { * @param containingFile The file containing the import * @returns The absolute file path or null if it can't be resolved */ -export function resolveModulePath(project: Project, moduleSpecifier: string, containingFile: string): string | null { +export function resolveModulePath( + project: Project, + moduleSpecifier: string, + containingFile: string +): string | null { const cacheKey = createCacheKey(moduleSpecifier, containingFile); // Check cache first if (modulePathCache.has(cacheKey)) { - return modulePathCache.get(cacheKey)!; + const 
cachedPath = modulePathCache.get(cacheKey); + if (cachedPath) { + return cachedPath; + } } // For relative paths, try a simple path resolution first @@ -76,7 +88,9 @@ export function resolveModulePath(project: Project, moduleSpecifier: string, con const extensions = ['.ts', '.tsx', '.js', '.jsx', '.d.ts']; // Check if the module specifier already has a valid extension - const hasExtension = extensions.some(ext => moduleSpecifier.endsWith(ext)); + const hasExtension = extensions.some((ext) => + moduleSpecifier.endsWith(ext) + ); // 1. If it has an extension, try the exact path first if (hasExtension) { @@ -103,7 +117,7 @@ export function resolveModulePath(project: Project, moduleSpecifier: string, con ? path.resolve( basePath, path.dirname(moduleSpecifier), - path.basename(moduleSpecifier, path.extname(moduleSpecifier)), + path.basename(moduleSpecifier, path.extname(moduleSpecifier)) ) : path.resolve(basePath, moduleSpecifier); @@ -116,6 +130,7 @@ export function resolveModulePath(project: Project, moduleSpecifier: string, con } } catch (e) { // Fall through to TypeScript's module resolution + log(`Error resolving module: ${moduleSpecifier}`, e); } } @@ -124,7 +139,7 @@ export function resolveModulePath(project: Project, moduleSpecifier: string, con moduleSpecifier, containingFile, project.getCompilerOptions() as ts.CompilerOptions, - ts.sys, + ts.sys ); // Validate and cache the result @@ -155,12 +170,16 @@ export function resolveModulePath(project: Project, moduleSpecifier: string, con export function getModuleSourceFile( project: Project, moduleSpecifier: string, - containingFile: string, + containingFile: string ): SourceFile | null { log(`Resolving module: ${moduleSpecifier} from ${containingFile}`); // Step 1: Try to resolve the module to a file path - const resolvedPath = resolveModulePath(project, moduleSpecifier, containingFile); + const resolvedPath = resolveModulePath( + project, + moduleSpecifier, + containingFile + ); if (!resolvedPath) { log(`Could not resolve module: ${moduleSpecifier}`); return null; @@ -168,7 +187,10 @@ export function getModuleSourceFile( // Step 2: Check if we already have this file if (resolvedFilesCache.has(resolvedPath)) { - return resolvedFilesCache.get(resolvedPath)!; + const cachedFile = resolvedFilesCache.get(resolvedPath); + if (cachedFile) { + return cachedFile; + } } // Step 3: Get or add the file to the project diff --git a/yarn.lock b/yarn.lock index a4bd4feca..3cde3ca52 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5803,6 +5803,15 @@ resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== +"@ts-morph/common@~0.25.0": + version "0.25.0" + resolved "https://registry.yarnpkg.com/@ts-morph/common/-/common-0.25.0.tgz#b76cbd517118acc8eadaf12b2fc2d47f42923452" + integrity sha512-kMnZz+vGGHi4GoHnLmMhGNjm44kGtKUXGnOvrKmMwAuvNjM/PgKVGfUnL7IDvK7Jb2QQ82jq3Zmp04Gy+r3Dkg== + dependencies: + minimatch "^9.0.4" + path-browserify "^1.0.1" + tinyglobby "^0.2.9" + "@tsconfig/node10@^1.0.7": version "1.0.9" resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" @@ -6128,6 +6137,11 @@ resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== +"@types/prettier@^2.6.2": + version 
"2.7.3" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.3.tgz#3e51a17e291d01d17d3fc61422015a933af7a08f" + integrity sha512-+68kP9yzs4LMp7VNh8gdzMSPZFL44MLGqiHWvttYJe+6qnuVr4Ek9wSBQoveqY/r+LwjCcU29kNVkidwim+kYA== + "@types/pretty-hrtime@^1.0.0": version "1.0.1" resolved "https://registry.yarnpkg.com/@types/pretty-hrtime/-/pretty-hrtime-1.0.1.tgz#72a26101dc567b0d68fd956cf42314556e42d601" @@ -8016,6 +8030,11 @@ co@^4.6.0: resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== +code-block-writer@^13.0.3: + version "13.0.3" + resolved "https://registry.yarnpkg.com/code-block-writer/-/code-block-writer-13.0.3.tgz#90f8a84763a5012da7af61319dd638655ae90b5b" + integrity sha512-Oofo0pq3IKnsFtuHqSF7TqBfr71aeyZDVJ0HpmqB7FBM2qEigL0iPONSCZSO9pE9dZTAxANe5XHG9Uy0YMv8cg== + collect-v8-coverage@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" @@ -16510,7 +16529,7 @@ tinyexec@^0.3.2: resolved "https://registry.yarnpkg.com/tinyexec/-/tinyexec-0.3.2.tgz#941794e657a85e496577995c6eef66f53f42b3d2" integrity sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA== -tinyglobby@^0.2.12: +tinyglobby@^0.2.12, tinyglobby@^0.2.9: version "0.2.12" resolved "https://registry.yarnpkg.com/tinyglobby/-/tinyglobby-0.2.12.tgz#ac941a42e0c5773bd0b5d08f32de82e74a1a61b5" integrity sha512-qkf4trmKSIiMTs/E63cxH+ojC2unam7rJ0WrauAzpT3ECNTxGRMlaXxVbfxMUC/w0LaYk6jQ4y/nGR9uBO3tww== @@ -16643,6 +16662,14 @@ ts-loader@^9.3.1: micromatch "^4.0.0" semver "^7.3.4" +ts-morph@24.0.0: + version "24.0.0" + resolved "https://registry.yarnpkg.com/ts-morph/-/ts-morph-24.0.0.tgz#6249b526ade40cf99c8803e7abdae6c65882e58e" + integrity sha512-2OAOg/Ob5yx9Et7ZX4CvTCc0UFoZHwLEJ+dpDPSUi5TgwwlTlX47w+iFRrEwzUZwYACjq83cgjS/Da50Ga37uw== + dependencies: + "@ts-morph/common" "~0.25.0" + code-block-writer "^13.0.3" + ts-node@10.9.2: version "10.9.2" resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f" From 921bca427e6141b140af1166d26044723cb763a1 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 3 Apr 2025 17:32:45 -0700 Subject: [PATCH 07/75] add script to find the nearest tsconfig add end to end test with button styles example --- .../src/__tests__/analyzer.test.ts | 12 +- .../src/__tests__/cssVarE2E.test.ts | 29 +- .../token-analyzer/src/__tests__/e2e.test.ts | 29 + .../src/__tests__/moduleResolver.test.ts | 52 +- .../src/__tests__/packageImports.test.ts | 124 ++-- .../src/__tests__/reexportTracking.test.ts | 70 +- .../test-files/useButtonStyles.styles.ts | 619 ++++++++++++++++++ .../src/__tests__/typeCheckerImports.test.ts | 50 +- .../token-analyzer/src/findTsConfigPath.ts | 20 + packages/token-analyzer/src/index.ts | 3 + 10 files changed, 898 insertions(+), 110 deletions(-) create mode 100644 packages/token-analyzer/src/__tests__/e2e.test.ts create mode 100644 packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts create mode 100644 packages/token-analyzer/src/findTsConfigPath.ts diff --git a/packages/token-analyzer/src/__tests__/analyzer.test.ts b/packages/token-analyzer/src/__tests__/analyzer.test.ts index 1d6390842..3a3af15d8 100644 --- a/packages/token-analyzer/src/__tests__/analyzer.test.ts +++ b/packages/token-analyzer/src/__tests__/analyzer.test.ts @@ -3,6 
+3,7 @@ import { analyzeFile } from '../astAnalyzer.js'; import { sampleStyles } from './sample-styles.js'; import * as path from 'path'; import * as fs from 'fs/promises'; +import { findTsConfigPath } from '../findTsConfigPath'; describe('Token Analyzer', () => { let project: Project; @@ -16,6 +17,7 @@ describe('Token Analyzer', () => { await fs.writeFile(tempFilePath, sampleStyles); project = new Project({ + tsConfigFilePath: findTsConfigPath() || '', skipAddingFilesFromTsConfig: true, skipFileDependencyResolution: false, }); @@ -41,13 +43,13 @@ describe('Token Analyzer', () => { expect.objectContaining({ property: 'color', token: 'tokens.colorNeutralForeground1', - }), + }) ); expect(styles.useStyles.root.tokens).toContainEqual( expect.objectContaining({ property: 'borderRightColor', token: 'tokens.colorNeutralStrokeDisabled', - }), + }) ); // Verify anotherSlot styles @@ -55,7 +57,7 @@ describe('Token Analyzer', () => { expect.objectContaining({ property: 'color', token: 'tokens.colorNeutralForeground2', - }), + }) ); // Verify focus function styles @@ -77,6 +79,8 @@ describe('Token Analyzer', () => { conditions: ['disabled'], slotName: 'root', }); - expect(metadata.styleConditions['styles.large'].conditions).toContain("size === 'large'"); + expect(metadata.styleConditions['styles.large'].conditions).toContain( + "size === 'large'" + ); }); }); diff --git a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts index 73826e726..1e4e7018d 100644 --- a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts +++ b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts @@ -3,6 +3,7 @@ import { Project } from 'ts-morph'; import { analyzeFile } from '../astAnalyzer.js'; import * as path from 'path'; import * as fs from 'fs/promises'; +import { findTsConfigPath } from '../findTsConfigPath'; // Test file contents const cssVarsStyleFile = ` @@ -75,7 +76,7 @@ describe('CSS Variable Token Extraction E2E', () => { // Initialize project project = new Project({ - tsConfigFilePath: path.join(tempDir, '../../../tsconfig.json'), + tsConfigFilePath: findTsConfigPath() || '', skipAddingFilesFromTsConfig: true, }); }); @@ -104,7 +105,7 @@ describe('CSS Variable Token Extraction E2E', () => { expect.objectContaining({ property: 'color', token: 'tokens.colorNeutralForeground1', - }), + }) ); // 2. Verify CSS variable with token @@ -113,7 +114,7 @@ describe('CSS Variable Token Extraction E2E', () => { expect.objectContaining({ property: 'color', token: 'tokens.colorBrandForeground4', - }), + }) ); // 3. Verify imported direct token @@ -123,7 +124,7 @@ describe('CSS Variable Token Extraction E2E', () => { property: 'color', token: 'tokens.colorBrandForeground6', isVariableReference: true, - }), + }) ); // 4. Verify imported CSS variable with token @@ -133,7 +134,7 @@ describe('CSS Variable Token Extraction E2E', () => { property: 'color', token: 'tokens.colorBrandForeground3', isVariableReference: true, - }), + }) ); // 5. Verify nested CSS variable with token @@ -142,7 +143,7 @@ describe('CSS Variable Token Extraction E2E', () => { expect.objectContaining({ property: 'background', token: 'tokens.colorBrandForeground2', - }), + }) ); // 6. Verify imported nested CSS variable with token @@ -152,7 +153,7 @@ describe('CSS Variable Token Extraction E2E', () => { property: 'color', token: 'tokens.colorNeutralForeground3', isVariableReference: true, - }), + }) ); // 8. 
Verify imported complex CSS variable with multiple tokens @@ -167,7 +168,7 @@ describe('CSS Variable Token Extraction E2E', () => { token: 'tokens.colorNeutralBackground1', isVariableReference: true, }), - ]), + ]) ); }); }); @@ -196,7 +197,7 @@ describe('CSS Variable Cross-Module Resolution E2E', () => { export const primaryToken = tokens.colorBrandPrimary; export const secondaryToken = tokens.colorBrandSecondary; export const furtherMargin = tokens.spacingVerticalXXL; - `, + ` ); await fs.writeFile( @@ -211,7 +212,7 @@ describe('CSS Variable Cross-Module Resolution E2E', () => { export const multiTokenVar = \`var(--multi, \${primaryToken} \${tokens.colorBrandSecondary})\`; export const someMargin = tokens.spacingHorizontalXXL; export const someOtherMargin = furtherMargin; - `, + ` ); await fs.writeFile( @@ -220,7 +221,7 @@ describe('CSS Variable Cross-Module Resolution E2E', () => { // Re-export everything export * from './colors'; export * from './variables'; - `, + ` ); await fs.writeFile( @@ -247,12 +248,12 @@ describe('CSS Variable Cross-Module Resolution E2E', () => { }); export default useStyles; - `, + ` ); // Initialize project project = new Project({ - tsConfigFilePath: path.join(tempDir, '../../../tsconfig.json'), + tsConfigFilePath: findTsConfigPath() || '', skipAddingFilesFromTsConfig: true, }); }); @@ -305,7 +306,7 @@ describe('CSS Variable Cross-Module Resolution E2E', () => { token: 'tokens.colorBrandSecondary', isVariableReference: true, }), - ]), + ]) ); }); }); diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts new file mode 100644 index 000000000..50de62be0 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -0,0 +1,29 @@ +import * as path from 'path'; +import * as fs from 'fs/promises'; +import { analyzeProjectStyles } from '../index.js'; + +describe('e2e test', () => { + let tempDir: string; + let targetPath: string; + + beforeAll(async () => { + // Create temp directory for test files + tempDir = path.join(process.cwd(), 'src', '__tests__', 'test-files'); + await fs.mkdir(tempDir, { recursive: true }); + targetPath = path.join( + process.cwd(), + 'src', + '__tests__', + 'test-files', + 'analysis.json' + ); + }); + + afterAll(async () => { + // Clean up temp files + await fs.rm(targetPath, { recursive: true, force: true }); + }); + test('analyze test button styles', async () => { + await analyzeProjectStyles(tempDir, targetPath); + }, 10000); +}); diff --git a/packages/token-analyzer/src/__tests__/moduleResolver.test.ts b/packages/token-analyzer/src/__tests__/moduleResolver.test.ts index 1fe1364e1..b928f07ba 100644 --- a/packages/token-analyzer/src/__tests__/moduleResolver.test.ts +++ b/packages/token-analyzer/src/__tests__/moduleResolver.test.ts @@ -10,6 +10,7 @@ import { } from '../moduleResolver'; import * as path from 'path'; import * as fs from 'fs'; +import { findTsConfigPath } from '../findTsConfigPath'; // Setup test directory and files const TEST_DIR = path.join(__dirname, 'test-module-resolver'); @@ -28,14 +29,14 @@ beforeAll(() => { import defaultExport from './constants'; const x = func(); - `, + ` ); fs.writeFileSync( path.join(TEST_DIR, 'utils.ts'), ` export const func = () => 'test'; - `, + ` ); fs.mkdirSync(path.join(TEST_DIR, 'styles'), { recursive: true }); @@ -46,14 +47,14 @@ beforeAll(() => { primary: 'tokens.colors.primary', secondary: 'tokens.colors.secondary' }; - `, + ` ); fs.writeFileSync( path.join(TEST_DIR, 'constants.ts'), ` export default 
'tokens.default.value'; - `, + ` ); // Create a file with extension in the import @@ -61,7 +62,7 @@ beforeAll(() => { path.join(TEST_DIR, 'with-extension.ts'), ` import { func } from './utils.ts'; - `, + ` ); }); @@ -77,10 +78,7 @@ describe('Module resolver functions', () => { beforeEach(() => { // Create a fresh project for each test project = new Project({ - compilerOptions: { - target: ScriptTarget.ES2020, - moduleResolution: ModuleResolutionKind.NodeNext, - }, + tsConfigFilePath: findTsConfigPath() || '', }); // Clear caches @@ -98,7 +96,11 @@ describe('Module resolver functions', () => { test('resolves nested relative path correctly', () => { const sourceFilePath = path.join(TEST_DIR, 'source.ts'); - const result = resolveModulePath(project, './styles/theme', sourceFilePath); + const result = resolveModulePath( + project, + './styles/theme', + sourceFilePath + ); expect(result).not.toBeNull(); expect(result).toEqual(path.join(TEST_DIR, 'styles/theme.ts')); @@ -114,7 +116,11 @@ describe('Module resolver functions', () => { test('returns null for non-existent module', () => { const sourceFilePath = path.join(TEST_DIR, 'source.ts'); - const result = resolveModulePath(project, './non-existent', sourceFilePath); + const result = resolveModulePath( + project, + './non-existent', + sourceFilePath + ); expect(result).toBeNull(); }); @@ -133,7 +139,11 @@ describe('Module resolver functions', () => { }); // Second call should use cache - const secondResult = resolveModulePath(project, './utils', sourceFilePath); + const secondResult = resolveModulePath( + project, + './utils', + sourceFilePath + ); expect(secondResult).toEqual(firstResult); // Restore original function @@ -157,7 +167,11 @@ describe('Module resolver functions', () => { project.addSourceFileAtPath(sourceFilePath); // First call - const firstResult = getModuleSourceFile(project, './utils', sourceFilePath); + const firstResult = getModuleSourceFile( + project, + './utils', + sourceFilePath + ); expect(firstResult).not.toBeNull(); // Mock project.addSourceFileAtPath to verify cache is used @@ -167,7 +181,11 @@ describe('Module resolver functions', () => { }); // Second call should use cache - const secondResult = getModuleSourceFile(project, './utils', sourceFilePath); + const secondResult = getModuleSourceFile( + project, + './utils', + sourceFilePath + ); expect(secondResult).toBe(firstResult); // Same instance // Restore original function @@ -178,7 +196,11 @@ describe('Module resolver functions', () => { const sourceFilePath = path.join(TEST_DIR, 'source.ts'); project.addSourceFileAtPath(sourceFilePath); - const result = getModuleSourceFile(project, './non-existent', sourceFilePath); + const result = getModuleSourceFile( + project, + './non-existent', + sourceFilePath + ); expect(result).toBeNull(); }); }); diff --git a/packages/token-analyzer/src/__tests__/packageImports.test.ts b/packages/token-analyzer/src/__tests__/packageImports.test.ts index 5977f7b6b..ca51a3bff 100644 --- a/packages/token-analyzer/src/__tests__/packageImports.test.ts +++ b/packages/token-analyzer/src/__tests__/packageImports.test.ts @@ -1,8 +1,13 @@ // packageImports.test.ts import { Project, ModuleResolutionKind, ScriptTarget } from 'ts-morph'; -import { resolveModulePath, clearModuleCache, tsUtils } from '../moduleResolver'; +import { + resolveModulePath, + clearModuleCache, + tsUtils, +} from '../moduleResolver'; import * as path from 'path'; import * as fs from 'fs'; +import { findTsConfigPath } from '../findTsConfigPath'; // Setup test directory and 
mock node_modules structure const TEST_DIR = path.join(__dirname, 'test-package-imports'); @@ -28,7 +33,7 @@ beforeAll(() => { ` import { Component } from '@scope/package'; import { helper } from 'some-package'; - `, + ` ); // Create package.json and index files for the scoped package @@ -38,7 +43,7 @@ beforeAll(() => { name: '@scope/package', version: '1.0.0', main: 'index.js', - }), + }) ); fs.writeFileSync( path.join(SCOPED_PACKAGE, 'index.js'), @@ -46,7 +51,7 @@ beforeAll(() => { export const Component = { theme: 'tokens.components.primary' }; - `, + ` ); // Create package.json and index files for the regular package @@ -56,7 +61,7 @@ beforeAll(() => { name: 'some-package', version: '1.0.0', main: './lib/index.js', - }), + }) ); // Create lib directory in the regular package @@ -66,7 +71,7 @@ beforeAll(() => { path.join(REGULAR_PACKAGE, 'lib', 'index.js'), ` export const helper = 'tokens.helpers.main'; - `, + ` ); }); @@ -83,10 +88,7 @@ describe('Package imports resolution', () => { beforeEach(() => { project = new Project({ - compilerOptions: { - target: ScriptTarget.ES2020, - moduleResolution: ModuleResolutionKind.NodeNext, - }, + tsConfigFilePath: findTsConfigPath() || '', }); // Setup workspace @@ -117,19 +119,31 @@ describe('Package imports resolution', () => { // Mock the TypeScript resolution for scoped packages tsUtils.resolveModuleName = jest .fn() - .mockImplementation((moduleName: string, containingFile: string, compilerOptions: any, host: any) => { - if (moduleName === '@scope/package') { - return { - resolvedModule: { - resolvedFileName: path.join(SCOPED_PACKAGE, 'index.js'), - extension: '.js', - isExternalLibraryImport: true, - }, - }; + .mockImplementation( + ( + moduleName: string, + containingFile: string, + compilerOptions: any, + host: any + ) => { + if (moduleName === '@scope/package') { + return { + resolvedModule: { + resolvedFileName: path.join(SCOPED_PACKAGE, 'index.js'), + extension: '.js', + isExternalLibraryImport: true, + }, + }; + } + // Call original for other cases + return originalResolve( + moduleName, + containingFile, + compilerOptions, + host + ); } - // Call original for other cases - return originalResolve(moduleName, containingFile, compilerOptions, host); - }); + ); const result = resolveModulePath(project, '@scope/package', sourceFilePath); @@ -144,19 +158,31 @@ describe('Package imports resolution', () => { // Mock the TypeScript resolution for regular packages tsUtils.resolveModuleName = jest .fn() - .mockImplementation((moduleName: string, containingFile: string, compilerOptions: any, host: any) => { - if (moduleName === 'some-package') { - return { - resolvedModule: { - resolvedFileName: path.join(REGULAR_PACKAGE, 'lib', 'index.js'), - extension: '.js', - isExternalLibraryImport: true, - }, - }; + .mockImplementation( + ( + moduleName: string, + containingFile: string, + compilerOptions: any, + host: any + ) => { + if (moduleName === 'some-package') { + return { + resolvedModule: { + resolvedFileName: path.join(REGULAR_PACKAGE, 'lib', 'index.js'), + extension: '.js', + isExternalLibraryImport: true, + }, + }; + } + // Call original for other cases + return originalResolve( + moduleName, + containingFile, + compilerOptions, + host + ); } - // Call original for other cases - return originalResolve(moduleName, containingFile, compilerOptions, host); - }); + ); const result = resolveModulePath(project, 'some-package', sourceFilePath); @@ -171,15 +197,31 @@ describe('Package imports resolution', () => { // Mock the TypeScript resolution to 
return null for non-existent packages tsUtils.resolveModuleName = jest .fn() - .mockImplementation((moduleName: string, containingFile: string, compilerOptions: any, host: any) => { - if (moduleName === 'non-existent-package') { - return { resolvedModule: undefined }; + .mockImplementation( + ( + moduleName: string, + containingFile: string, + compilerOptions: any, + host: any + ) => { + if (moduleName === 'non-existent-package') { + return { resolvedModule: undefined }; + } + // Call original for other cases + return originalResolve( + moduleName, + containingFile, + compilerOptions, + host + ); } - // Call original for other cases - return originalResolve(moduleName, containingFile, compilerOptions, host); - }); + ); - const result = resolveModulePath(project, 'non-existent-package', sourceFilePath); + const result = resolveModulePath( + project, + 'non-existent-package', + sourceFilePath + ); expect(result).toBeNull(); expect(tsUtils.resolveModuleName).toHaveBeenCalled(); diff --git a/packages/token-analyzer/src/__tests__/reexportTracking.test.ts b/packages/token-analyzer/src/__tests__/reexportTracking.test.ts index 338deae74..ce78a43a8 100644 --- a/packages/token-analyzer/src/__tests__/reexportTracking.test.ts +++ b/packages/token-analyzer/src/__tests__/reexportTracking.test.ts @@ -3,6 +3,7 @@ import { Project } from 'ts-morph'; import { analyzeImports, ImportedValue } from '../importAnalyzer'; import * as path from 'path'; import * as fs from 'fs'; +import { findTsConfigPath } from '../findTsConfigPath'; // Setup test directory with a chain of re-exports const TEST_DIR = path.join(__dirname, 'test-reexports'); @@ -26,7 +27,7 @@ beforeAll(() => { direct: DirectValue, default: DefaultExport }; - `, + ` ); // Create an index file that re-exports everything @@ -47,7 +48,7 @@ beforeAll(() => { // Re-export default export { default } from './defaults'; - `, + ` ); // Create a components file @@ -55,7 +56,7 @@ beforeAll(() => { path.join(TEST_DIR, 'components.ts'), ` export const Component = 'tokens.components.primary'; - `, + ` ); // Create a values file @@ -63,7 +64,7 @@ beforeAll(() => { path.join(TEST_DIR, 'values.ts'), ` export const Value = 'tokens.values.standard'; - `, + ` ); // Create a utils file @@ -71,7 +72,7 @@ beforeAll(() => { path.join(TEST_DIR, 'utils.ts'), ` export const Utils = 'tokens.utils.helper'; - `, + ` ); // Create a defaults file @@ -80,7 +81,7 @@ beforeAll(() => { ` const DefaultValue = 'tokens.defaults.main'; export default DefaultValue; - `, + ` ); }); @@ -97,7 +98,7 @@ describe('Re-export tracking', () => { // Create a project using the existing directory structure // This makes it easier to test without needing to override compiler options project = new Project({ - tsConfigFilePath: path.join(TEST_DIR, '../../../tsconfig.json'), + tsConfigFilePath: findTsConfigPath() || '', skipAddingFilesFromTsConfig: true, }); @@ -111,7 +112,7 @@ describe('Re-export tracking', () => { esModuleInterop: true, skipLibCheck: true, }, - }), + }) ); }); @@ -119,31 +120,48 @@ describe('Re-export tracking', () => { const mainFile = path.join(TEST_DIR, 'main.ts'); const sourceFile = project.addSourceFileAtPath(mainFile); - const importedValues: Map = await analyzeImports(sourceFile, project); + const importedValues: Map = await analyzeImports( + sourceFile, + project + ); // Check that Component was correctly resolved from components.ts expect(importedValues.has('Component')).toBe(true); - expect(importedValues.get('Component')?.value).toBe('tokens.components.primary'); - 
expect(importedValues.get('Component')?.sourceFile).toContain('components.ts'); + expect(importedValues.get('Component')?.value).toBe( + 'tokens.components.primary' + ); + expect(importedValues.get('Component')?.sourceFile).toContain( + 'components.ts' + ); }); test('follows aliased re-export chain', async () => { const mainFile = path.join(TEST_DIR, 'main.ts'); const sourceFile = project.addSourceFileAtPath(mainFile); - const importedValues: Map = await analyzeImports(sourceFile, project); + const importedValues: Map = await analyzeImports( + sourceFile, + project + ); // Check that AliasedValue was correctly resolved from values.ts expect(importedValues.has('AliasedValue')).toBe(true); - expect(importedValues.get('AliasedValue')?.value).toBe('tokens.values.standard'); - expect(importedValues.get('AliasedValue')?.sourceFile).toContain('values.ts'); + expect(importedValues.get('AliasedValue')?.value).toBe( + 'tokens.values.standard' + ); + expect(importedValues.get('AliasedValue')?.sourceFile).toContain( + 'values.ts' + ); }); test('follows namespace re-export', async () => { const mainFile = path.join(TEST_DIR, 'main.ts'); const sourceFile = project.addSourceFileAtPath(mainFile); - const importedValues: Map = await analyzeImports(sourceFile, project); + const importedValues: Map = await analyzeImports( + sourceFile, + project + ); // Check that Utils from namespace export was correctly resolved expect(importedValues.has('Utils')).toBe(true); @@ -155,11 +173,16 @@ describe('Re-export tracking', () => { const mainFile = path.join(TEST_DIR, 'main.ts'); const sourceFile = project.addSourceFileAtPath(mainFile); - const importedValues: Map = await analyzeImports(sourceFile, project); + const importedValues: Map = await analyzeImports( + sourceFile, + project + ); // Check that DirectValue was correctly resolved from index.ts expect(importedValues.has('DirectValue')).toBe(true); - expect(importedValues.get('DirectValue')?.value).toBe('tokens.direct.value'); + expect(importedValues.get('DirectValue')?.value).toBe( + 'tokens.direct.value' + ); expect(importedValues.get('DirectValue')?.sourceFile).toContain('index.ts'); }); @@ -167,11 +190,18 @@ describe('Re-export tracking', () => { const mainFile = path.join(TEST_DIR, 'main.ts'); const sourceFile = project.addSourceFileAtPath(mainFile); - const importedValues: Map = await analyzeImports(sourceFile, project); + const importedValues: Map = await analyzeImports( + sourceFile, + project + ); // Check that DefaultExport was correctly resolved from defaults.ts expect(importedValues.has('DefaultExport')).toBe(true); - expect(importedValues.get('DefaultExport')?.value).toBe('tokens.defaults.main'); - expect(importedValues.get('DefaultExport')?.sourceFile).toContain('defaults.ts'); + expect(importedValues.get('DefaultExport')?.value).toBe( + 'tokens.defaults.main' + ); + expect(importedValues.get('DefaultExport')?.sourceFile).toContain( + 'defaults.ts' + ); }); }); diff --git a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts new file mode 100644 index 000000000..b18d6ee42 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts @@ -0,0 +1,619 @@ +import { + iconFilledClassName, + iconRegularClassName, +} from '@fluentui/react-icons'; +import { createCustomFocusIndicatorStyle } from '@fluentui/react-tabster'; +import { tokens } from '@fluentui/react-theme'; +import { + shorthands, + makeStyles, + makeResetStyles, + 
mergeClasses, +} from '@griffel/react'; +import type { SlotClassNames } from '@fluentui/react-utilities'; +import type { ButtonSlots, ButtonState } from '@fluentui/react-components'; + +export const buttonClassNames: SlotClassNames = { + root: 'fui-Button', + icon: 'fui-Button__icon', +}; + +const iconSpacingVar = '--fui-Button__icon--spacing'; + +const buttonSpacingSmall = '3px'; +const buttonSpacingSmallWithIcon = '1px'; +const buttonSpacingMedium = '5px'; +const buttonSpacingLarge = '8px'; +const buttonSpacingLargeWithIcon = '7px'; + +/* Firefox has box shadow sizing issue at some zoom levels + * this will ensure the inset boxShadow is always uniform + * without affecting other browser platforms + */ +const boxShadowStrokeWidthThinMoz = `calc(${tokens.strokeWidthThin} + 0.25px)`; + +const useRootBaseClassName = makeResetStyles({ + alignItems: 'center', + boxSizing: 'border-box', + display: 'inline-flex', + justifyContent: 'center', + textDecorationLine: 'none', + verticalAlign: 'middle', + + margin: 0, + overflow: 'hidden', + + backgroundColor: tokens.colorNeutralBackground1, + color: tokens.colorNeutralForeground1, + border: `${tokens.strokeWidthThin} solid ${tokens.colorNeutralStroke1}`, + + fontFamily: tokens.fontFamilyBase, + outlineStyle: 'none', + + ':hover': { + backgroundColor: tokens.colorNeutralBackground1Hover, + borderColor: tokens.colorNeutralStroke1Hover, + color: tokens.colorNeutralForeground1Hover, + + cursor: 'pointer', + }, + + ':hover:active': { + backgroundColor: tokens.colorNeutralBackground1Pressed, + borderColor: tokens.colorNeutralStroke1Pressed, + color: tokens.colorNeutralForeground1Pressed, + + outlineStyle: 'none', + }, + + padding: `${buttonSpacingMedium} ${tokens.spacingHorizontalM}`, + minWidth: '96px', + borderRadius: tokens.borderRadiusMedium, + + fontSize: tokens.fontSizeBase300, + fontWeight: tokens.fontWeightSemibold, + lineHeight: tokens.lineHeightBase300, + + // Transition styles + + transitionDuration: tokens.durationFaster, + transitionProperty: 'background, border, color', + transitionTimingFunction: tokens.curveEasyEase, + + '@media screen and (prefers-reduced-motion: reduce)': { + transitionDuration: '0.01ms', + }, + + // High contrast styles + + '@media (forced-colors: active)': { + ':focus': { + borderColor: 'ButtonText', + }, + + ':hover': { + backgroundColor: 'HighlightText', + borderColor: 'Highlight', + color: 'Highlight', + forcedColorAdjust: 'none', + }, + + ':hover:active': { + backgroundColor: 'HighlightText', + borderColor: 'Highlight', + color: 'Highlight', + forcedColorAdjust: 'none', + }, + }, + + // Focus styles + + ...createCustomFocusIndicatorStyle({ + borderColor: tokens.colorStrokeFocus2, + borderRadius: tokens.borderRadiusMedium, + borderWidth: '1px', + outline: `${tokens.strokeWidthThick} solid ${tokens.colorTransparentStroke}`, + boxShadow: `0 0 0 ${tokens.strokeWidthThin} ${tokens.colorStrokeFocus2} + inset + `, + zIndex: 1, + }), + + // BUGFIX: Mozilla specific styles (Mozilla BugID: 1857642) + '@supports (-moz-appearance:button)': { + ...createCustomFocusIndicatorStyle({ + boxShadow: `0 0 0 ${boxShadowStrokeWidthThinMoz} ${tokens.colorStrokeFocus2} + inset + `, + }), + }, +}); + +const useIconBaseClassName = makeResetStyles({ + alignItems: 'center', + display: 'inline-flex', + justifyContent: 'center', + + fontSize: '20px', + height: '20px', + width: '20px', + + [iconSpacingVar]: tokens.spacingHorizontalSNudge, +}); + +const useRootStyles = makeStyles({ + // Appearance variations + outline: { + backgroundColor: 
tokens.colorTransparentBackground, + + ':hover': { + backgroundColor: tokens.colorTransparentBackgroundHover, + }, + + ':hover:active': { + backgroundColor: tokens.colorTransparentBackgroundPressed, + }, + }, + primary: { + backgroundColor: tokens.colorBrandBackground, + ...shorthands.borderColor('transparent'), + color: tokens.colorNeutralForegroundOnBrand, + + ':hover': { + backgroundColor: tokens.colorBrandBackgroundHover, + ...shorthands.borderColor('transparent'), + color: tokens.colorNeutralForegroundOnBrand, + }, + + ':hover:active': { + backgroundColor: tokens.colorBrandBackgroundPressed, + ...shorthands.borderColor('transparent'), + color: tokens.colorNeutralForegroundOnBrand, + }, + + '@media (forced-colors: active)': { + backgroundColor: 'Highlight', + ...shorthands.borderColor('HighlightText'), + color: 'HighlightText', + forcedColorAdjust: 'none', + + ':hover': { + backgroundColor: 'HighlightText', + ...shorthands.borderColor('Highlight'), + color: 'Highlight', + }, + + ':hover:active': { + backgroundColor: 'HighlightText', + ...shorthands.borderColor('Highlight'), + color: 'Highlight', + }, + }, + }, + secondary: { + /* The secondary styles are exactly the same as the base styles. */ + }, + subtle: { + backgroundColor: tokens.colorSubtleBackground, + ...shorthands.borderColor('transparent'), + color: tokens.colorNeutralForeground2, + + ':hover': { + backgroundColor: tokens.colorSubtleBackgroundHover, + ...shorthands.borderColor('transparent'), + color: tokens.colorNeutralForeground2Hover, + [`& .${iconFilledClassName}`]: { + display: 'inline', + }, + [`& .${iconRegularClassName}`]: { + display: 'none', + }, + [`& .${buttonClassNames.icon}`]: { + color: tokens.colorNeutralForeground2BrandHover, + }, + }, + + ':hover:active': { + backgroundColor: tokens.colorSubtleBackgroundPressed, + ...shorthands.borderColor('transparent'), + color: tokens.colorNeutralForeground2Pressed, + [`& .${iconFilledClassName}`]: { + display: 'inline', + }, + [`& .${iconRegularClassName}`]: { + display: 'none', + }, + [`& .${buttonClassNames.icon}`]: { + color: tokens.colorNeutralForeground2BrandPressed, + }, + }, + + '@media (forced-colors: active)': { + ':hover': { + color: 'Highlight', + + [`& .${buttonClassNames.icon}`]: { + color: 'Highlight', + }, + }, + ':hover:active': { + color: 'Highlight', + + [`& .${buttonClassNames.icon}`]: { + color: 'Highlight', + }, + }, + }, + }, + transparent: { + backgroundColor: tokens.colorTransparentBackground, + ...shorthands.borderColor('transparent'), + color: tokens.colorNeutralForeground2, + + ':hover': { + backgroundColor: tokens.colorTransparentBackgroundHover, + ...shorthands.borderColor('transparent'), + color: tokens.colorNeutralForeground2BrandHover, + [`& .${iconFilledClassName}`]: { + display: 'inline', + }, + [`& .${iconRegularClassName}`]: { + display: 'none', + }, + }, + + ':hover:active': { + backgroundColor: tokens.colorTransparentBackgroundPressed, + ...shorthands.borderColor('transparent'), + color: tokens.colorNeutralForeground2BrandPressed, + [`& .${iconFilledClassName}`]: { + display: 'inline', + }, + [`& .${iconRegularClassName}`]: { + display: 'none', + }, + }, + + '@media (forced-colors: active)': { + ':hover': { + backgroundColor: tokens.colorTransparentBackground, + color: 'Highlight', + }, + ':hover:active': { + backgroundColor: tokens.colorTransparentBackground, + color: 'Highlight', + }, + }, + }, + + // Shape variations + circular: { borderRadius: tokens.borderRadiusCircular }, + rounded: { + /* The borderRadius rounded styles are 
handled in the size variations */ + }, + square: { borderRadius: tokens.borderRadiusNone }, + + // Size variations + small: { + minWidth: '64px', + padding: `${buttonSpacingSmall} ${tokens.spacingHorizontalS}`, + borderRadius: tokens.borderRadiusMedium, + + fontSize: tokens.fontSizeBase200, + fontWeight: tokens.fontWeightRegular, + lineHeight: tokens.lineHeightBase200, + }, + smallWithIcon: { + paddingBottom: buttonSpacingSmallWithIcon, + paddingTop: buttonSpacingSmallWithIcon, + }, + medium: { + /* defined in base styles */ + }, + large: { + minWidth: '96px', + padding: `${buttonSpacingLarge} ${tokens.spacingHorizontalL}`, + borderRadius: tokens.borderRadiusMedium, + + fontSize: tokens.fontSizeBase400, + fontWeight: tokens.fontWeightSemibold, + lineHeight: tokens.lineHeightBase400, + }, + largeWithIcon: { + paddingBottom: buttonSpacingLargeWithIcon, + paddingTop: buttonSpacingLargeWithIcon, + }, +}); + +const useRootDisabledStyles = makeStyles({ + // Base styles + base: { + backgroundColor: tokens.colorNeutralBackgroundDisabled, + ...shorthands.borderColor(tokens.colorNeutralStrokeDisabled), + color: tokens.colorNeutralForegroundDisabled, + + cursor: 'not-allowed', + [`& .${buttonClassNames.icon}`]: { + color: tokens.colorNeutralForegroundDisabled, + }, + + ':hover': { + backgroundColor: tokens.colorNeutralBackgroundDisabled, + ...shorthands.borderColor(tokens.colorNeutralStrokeDisabled), + color: tokens.colorNeutralForegroundDisabled, + + cursor: 'not-allowed', + + [`& .${iconFilledClassName}`]: { + display: 'none', + }, + [`& .${iconRegularClassName}`]: { + display: 'inline', + }, + [`& .${buttonClassNames.icon}`]: { + color: tokens.colorNeutralForegroundDisabled, + }, + }, + + ':hover:active': { + backgroundColor: tokens.colorNeutralBackgroundDisabled, + ...shorthands.borderColor(tokens.colorNeutralStrokeDisabled), + color: tokens.colorNeutralForegroundDisabled, + + cursor: 'not-allowed', + + [`& .${iconFilledClassName}`]: { + display: 'none', + }, + [`& .${iconRegularClassName}`]: { + display: 'inline', + }, + [`& .${buttonClassNames.icon}`]: { + color: tokens.colorNeutralForegroundDisabled, + }, + }, + }, + + // High contrast styles + highContrast: { + '@media (forced-colors: active)': { + backgroundColor: 'ButtonFace', + ...shorthands.borderColor('GrayText'), + color: 'GrayText', + + ':focus': { + ...shorthands.borderColor('GrayText'), + }, + + ':hover': { + backgroundColor: 'ButtonFace', + ...shorthands.borderColor('GrayText'), + color: 'GrayText', + }, + + ':hover:active': { + backgroundColor: 'ButtonFace', + ...shorthands.borderColor('GrayText'), + color: 'GrayText', + }, + }, + }, + + // Appearance variations + outline: { + backgroundColor: tokens.colorTransparentBackground, + + ':hover': { + backgroundColor: tokens.colorTransparentBackground, + }, + + ':hover:active': { + backgroundColor: tokens.colorTransparentBackground, + }, + }, + primary: { + ...shorthands.borderColor('transparent'), + + ':hover': { + ...shorthands.borderColor('transparent'), + }, + + ':hover:active': { + ...shorthands.borderColor('transparent'), + }, + }, + secondary: { + /* The secondary styles are exactly the same as the base styles. 
*/ + }, + subtle: { + backgroundColor: tokens.colorTransparentBackground, + ...shorthands.borderColor('transparent'), + + ':hover': { + backgroundColor: tokens.colorTransparentBackground, + ...shorthands.borderColor('transparent'), + }, + + ':hover:active': { + backgroundColor: tokens.colorTransparentBackground, + ...shorthands.borderColor('transparent'), + }, + }, + transparent: { + backgroundColor: tokens.colorTransparentBackground, + ...shorthands.borderColor('transparent'), + + ':hover': { + backgroundColor: tokens.colorTransparentBackground, + ...shorthands.borderColor('transparent'), + }, + + ':hover:active': { + backgroundColor: tokens.colorTransparentBackground, + ...shorthands.borderColor('transparent'), + }, + }, +}); + +const useRootFocusStyles = makeStyles({ + // Shape variations + circular: createCustomFocusIndicatorStyle({ + borderRadius: tokens.borderRadiusCircular, + }), + rounded: { + /* The rounded styles are exactly the same as the base styles. */ + }, + square: createCustomFocusIndicatorStyle({ + borderRadius: tokens.borderRadiusNone, + }), + + // Primary styles + primary: { + ...createCustomFocusIndicatorStyle({ + ...shorthands.borderColor(tokens.colorStrokeFocus2), + boxShadow: `${tokens.shadow2}, 0 0 0 ${tokens.strokeWidthThin} ${tokens.colorStrokeFocus2} inset, 0 0 0 ${tokens.strokeWidthThick} ${tokens.colorNeutralForegroundOnBrand} inset`, + ':hover': { + boxShadow: `${tokens.shadow2}, 0 0 0 ${tokens.strokeWidthThin} ${tokens.colorStrokeFocus2} inset`, + ...shorthands.borderColor(tokens.colorStrokeFocus2), + }, + }), + + // BUGFIX: Mozilla specific styles (Mozilla BugID: 1857642) + '@supports (-moz-appearance:button)': { + ...createCustomFocusIndicatorStyle({ + boxShadow: `${tokens.shadow2}, 0 0 0 ${boxShadowStrokeWidthThinMoz} ${tokens.colorStrokeFocus2} inset, 0 0 0 ${tokens.strokeWidthThick} ${tokens.colorNeutralForegroundOnBrand} inset`, + ':hover': { + boxShadow: `${tokens.shadow2}, 0 0 0 ${boxShadowStrokeWidthThinMoz} ${tokens.colorStrokeFocus2} inset`, + }, + }), + }, + }, + + // Size variations + small: createCustomFocusIndicatorStyle({ + borderRadius: tokens.borderRadiusSmall, + }), + medium: { + /* defined in base styles */ + }, + large: createCustomFocusIndicatorStyle({ + borderRadius: tokens.borderRadiusLarge, + }), +}); + +const useRootIconOnlyStyles = makeStyles({ + // Size variations + small: { + padding: buttonSpacingSmallWithIcon, + + minWidth: '24px', + maxWidth: '24px', + }, + medium: { + padding: buttonSpacingMedium, + + minWidth: '32px', + maxWidth: '32px', + }, + large: { + padding: buttonSpacingLargeWithIcon, + + minWidth: '40px', + maxWidth: '40px', + }, +}); + +const useIconStyles = makeStyles({ + // Size variations + small: { + fontSize: '20px', + height: '20px', + width: '20px', + + [iconSpacingVar]: tokens.spacingHorizontalXS, + }, + medium: { + /* defined in base styles */ + }, + large: { + fontSize: '24px', + height: '24px', + width: '24px', + + [iconSpacingVar]: tokens.spacingHorizontalSNudge, + }, + + // Icon position variations + before: { + marginRight: `var(${iconSpacingVar})`, + }, + after: { + marginLeft: `var(${iconSpacingVar})`, + }, +}); + +export const useButtonStyles_unstable = (state: ButtonState): ButtonState => { + 'use no memo'; + + const rootBaseClassName = useRootBaseClassName(); + const iconBaseClassName = useIconBaseClassName(); + + const rootStyles = useRootStyles(); + const rootDisabledStyles = useRootDisabledStyles(); + const rootFocusStyles = useRootFocusStyles(); + const rootIconOnlyStyles = 
useRootIconOnlyStyles(); + const iconStyles = useIconStyles(); + + const { + appearance, + disabled, + disabledFocusable, + icon, + iconOnly, + iconPosition, + shape, + size, + } = state; + + state.root.className = mergeClasses( + buttonClassNames.root, + rootBaseClassName, + + appearance && rootStyles[appearance], + + rootStyles[size], + icon && size === 'small' && rootStyles.smallWithIcon, + icon && size === 'large' && rootStyles.largeWithIcon, + rootStyles[shape], + + // Disabled styles + (disabled || disabledFocusable) && rootDisabledStyles.base, + (disabled || disabledFocusable) && rootDisabledStyles.highContrast, + appearance && + (disabled || disabledFocusable) && + rootDisabledStyles[appearance], + + // Focus styles + appearance === 'primary' && rootFocusStyles.primary, + rootFocusStyles[size], + rootFocusStyles[shape], + + // Icon-only styles + iconOnly && rootIconOnlyStyles[size], + + // User provided class name + state.root.className + ); + + if (state.icon) { + state.icon.className = mergeClasses( + buttonClassNames.icon, + iconBaseClassName, + !!state.root.children && iconStyles[iconPosition], + iconStyles[size], + state.icon.className + ); + } + + return state; +}; diff --git a/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts b/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts index 890ae2621..a43cad4e8 100644 --- a/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts +++ b/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts @@ -3,6 +3,7 @@ import { Project } from 'ts-morph'; import { analyzeImports, ImportedValue } from '../importAnalyzer'; import * as path from 'path'; import * as fs from 'fs'; +import { findTsConfigPath } from '../findTsConfigPath'; // Setup test directory with a chain of re-exports const TEST_DIR = path.join(__dirname, 'test-type-checker'); @@ -26,7 +27,7 @@ beforeAll(() => { direct: DirectValue, default: DefaultExport }; - `, + ` ); // Create an index file that re-exports everything @@ -47,7 +48,7 @@ beforeAll(() => { // Re-export default export { default } from './defaults'; - `, + ` ); // Create a components file @@ -55,7 +56,7 @@ beforeAll(() => { path.join(TEST_DIR, 'components.ts'), ` export const Component = 'tokens.components.primary'; - `, + ` ); // Create a values file @@ -63,7 +64,7 @@ beforeAll(() => { path.join(TEST_DIR, 'values.ts'), ` export const Value = 'tokens.values.standard'; - `, + ` ); // Create a utils file @@ -71,7 +72,7 @@ beforeAll(() => { path.join(TEST_DIR, 'utils.ts'), ` export const Utils = 'tokens.utils.helper'; - `, + ` ); // Create a defaults file @@ -80,7 +81,7 @@ beforeAll(() => { ` const DefaultValue = 'tokens.defaults.main'; export default DefaultValue; - `, + ` ); }); @@ -97,7 +98,7 @@ describe('Type Checker Import Analysis', () => { // Create a project using the existing directory structure // This makes it easier to test without needing to override compiler options project = new Project({ - tsConfigFilePath: path.join(TEST_DIR, '../../../tsconfig.json'), + tsConfigFilePath: findTsConfigPath() || '', skipAddingFilesFromTsConfig: true, }); @@ -111,7 +112,7 @@ describe('Type Checker Import Analysis', () => { esModuleInterop: true, skipLibCheck: true, }, - }), + }) ); }); @@ -122,17 +123,28 @@ describe('Type Checker Import Analysis', () => { // Add all other files to ensure project has complete type information project.addSourceFilesAtPaths([path.join(TEST_DIR, '**/*.ts')]); - const importedValues: Map = await analyzeImports(sourceFile, project); + const 
importedValues: Map = await analyzeImports( + sourceFile, + project + ); // Verify standard re-export (Component) expect(importedValues.has('Component')).toBe(true); - expect(importedValues.get('Component')?.value).toBe('tokens.components.primary'); - expect(importedValues.get('Component')?.sourceFile).toContain('components.ts'); + expect(importedValues.get('Component')?.value).toBe( + 'tokens.components.primary' + ); + expect(importedValues.get('Component')?.sourceFile).toContain( + 'components.ts' + ); // Verify aliased re-export (AliasedValue) expect(importedValues.has('AliasedValue')).toBe(true); - expect(importedValues.get('AliasedValue')?.value).toBe('tokens.values.standard'); - expect(importedValues.get('AliasedValue')?.sourceFile).toContain('values.ts'); + expect(importedValues.get('AliasedValue')?.value).toBe( + 'tokens.values.standard' + ); + expect(importedValues.get('AliasedValue')?.sourceFile).toContain( + 'values.ts' + ); // Verify namespace re-export (Utils) expect(importedValues.has('Utils')).toBe(true); @@ -141,12 +153,18 @@ describe('Type Checker Import Analysis', () => { // Verify direct export (DirectValue) expect(importedValues.has('DirectValue')).toBe(true); - expect(importedValues.get('DirectValue')?.value).toBe('tokens.direct.value'); + expect(importedValues.get('DirectValue')?.value).toBe( + 'tokens.direct.value' + ); expect(importedValues.get('DirectValue')?.sourceFile).toContain('index.ts'); // Verify default export (DefaultExport) expect(importedValues.has('DefaultExport')).toBe(true); - expect(importedValues.get('DefaultExport')?.value).toBe('tokens.defaults.main'); - expect(importedValues.get('DefaultExport')?.sourceFile).toContain('defaults.ts'); + expect(importedValues.get('DefaultExport')?.value).toBe( + 'tokens.defaults.main' + ); + expect(importedValues.get('DefaultExport')?.sourceFile).toContain( + 'defaults.ts' + ); }); }); diff --git a/packages/token-analyzer/src/findTsConfigPath.ts b/packages/token-analyzer/src/findTsConfigPath.ts new file mode 100644 index 000000000..f914a1de2 --- /dev/null +++ b/packages/token-analyzer/src/findTsConfigPath.ts @@ -0,0 +1,20 @@ +import * as path from 'path'; +import * as fs from 'fs'; + +export function findTsConfigPath(startDir = process.cwd()): string | null { + let currentDir = startDir; + const root = path.parse(currentDir).root; + + while (currentDir !== root) { + const tsConfigPath = path.join(currentDir, 'tsconfig.json'); + if (fs.existsSync(tsConfigPath)) { + return tsConfigPath; + } + // Move up to parent directory + currentDir = path.dirname(currentDir); + } + + // Check root directory as well + const rootTsConfigPath = path.join(root, 'tsconfig.json'); + return fs.existsSync(rootTsConfigPath) ? 
rootTsConfigPath : null; +} diff --git a/packages/token-analyzer/src/index.ts b/packages/token-analyzer/src/index.ts index 6f527e2b1..339d330ed 100644 --- a/packages/token-analyzer/src/index.ts +++ b/packages/token-analyzer/src/index.ts @@ -6,6 +6,7 @@ import { findStyleFiles } from './fileOperations.js'; import { analyzeFile } from './astAnalyzer.js'; import { AnalysisResults, FileAnalysis } from './types.js'; import { configure, log, error, measureAsync } from './debugUtils.js'; +import { findTsConfigPath } from './findTsConfigPath'; async function analyzeProjectStyles( rootDir: string, @@ -27,6 +28,8 @@ async function analyzeProjectStyles( console.log(`Found ${styleFiles.length} style files to analyze`); const project = new Project({ + // Get the nearest tsconfig.json file so we can resolve modules and paths correctly based on the project config + tsConfigFilePath: findTsConfigPath() || '', skipAddingFilesFromTsConfig: true, skipFileDependencyResolution: false, }); From 0266aa4b53ba14913c69b2abd606214f0048c63a Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 3 Apr 2025 17:33:36 -0700 Subject: [PATCH 08/75] update todos --- packages/token-analyzer/README.md | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index 687b4ccc3..63e0a8581 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -5,26 +5,13 @@ A static analysis tool that scans your project's style files to track and analyz ## TODO - we also need to ensure var analysis is done correctly after the refactor -- ~~**This is high pri now since we have components in source using this technique (see buttonstyles.styles.ts)** Handle very complex cases like `var(--optional-token, var(--semantic-token, ${some-other-var-with-a-string-or-fallback}))`. This other var might be in another package or file as well. Currently we won't handle this level of depth but we could do symbol extraction in the future if needed to resolve the chain fully. This will likely require changes in importAnalyzer.ts and structural changes in the data we return. On top of needing to find referenced symbols within an aliased template string literal, we might also then need to parse out var fallbacks within short hands. IE: `padding: 'var(--first, var(--second)) 10px` and ensure the ordering is correct.~~ -- ~~Format output with prettier when we save to ensure stage lint doesn't fail.~~ -- ~~make sure this works with shorthand spread~~ - Look at the path info again. Do we ever need it? - Convert token member within the analysis output to an array so we can hold multiple tokens. The order should be the order or priority. [0] being the highest pri with the last item in the array the least prioritized. - Duplicate entries in useButtonStyles.styles.ts for useRootDisabledStyles.base.nested:hover.color - we might need to test case this -- ~~We've added the ability to analyze spreads but there's an issue where we find the tokens and call them out but they get nuked somewhere before we return them. Need to trace that and fix.~~ - Add makeResetStyles specific tests in analyzer to ensure we process those correctly. 
-- ~~Button has some weird patterns in it where it uses makeResetStyles and then uses enums to pull in the styles, we might need to account for those as well.~~ -- ~~Some property assignments can also be function calls, we need to process this scenario~~ -- ~~`createCustomFocusIndicatorStyle` is a special function that is used throughout the library so we might be able to special case it~~ -- ~~if we have file imports we need to analyze those such as importing base styles~~ - ~~- Manage makeResetStyles (likely same as makeStyles)~~ -- ~~what if we have multiple `makeStyles` calls merged, are we handling that correctly or just nuking the conflicts in our output?~~ -- as we update the functionality, we should update our test cases to reflect the new functionality we support and ensure it works. -- ~~if we have functions we can't process (or other code for that matter), can we add that data into our report so we know to manually go deal with it?~~ -- ~~assignedSlots in output to track which slots classes are applied to~~ -- ~~Add variables full name to metadata (i.e. classNames.icon instead of just 'icon)~~ -- ~~Module importing~~ - add config to point to custom prettier config for file output. +- run shorthands functions with placeholder functions and parse out which properties are actually being set. This is a special case we will need to hard code and thus should make independent from the rest of our logic so it's easier to modify/remove. +- add tests for findTsConfigPath ## Features From cb228f44764e8b1c960d0a0849ebca1c31c394bc Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 3 Apr 2025 17:41:00 -0700 Subject: [PATCH 09/75] updating readme with root cause --- packages/token-analyzer/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index 63e0a8581..4883fb33f 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -7,7 +7,7 @@ A static analysis tool that scans your project's style files to track and analyz - we also need to ensure var analysis is done correctly after the refactor - Look at the path info again. Do we ever need it? - Convert token member within the analysis output to an array so we can hold multiple tokens. The order should be the order or priority. [0] being the highest pri with the last item in the array the least prioritized. -- Duplicate entries in useButtonStyles.styles.ts for useRootDisabledStyles.base.nested:hover.color - we might need to test case this +- Duplicate entries in useButtonStyles.styles.ts for useRootDisabledStyles.base.nested:hover.color - we might need to test case this. This turns out to not be a dupe but a misplaced item. There's multiple layers of nesting in this particular style and instead of creating another nested layer it just injects the token into the direct tokens list. We should create another layer of nested. - Add makeResetStyles specific tests in analyzer to ensure we process those correctly. - add config to point to custom prettier config for file output. - run shorthands functions with placeholder functions and parse out which properties are actually being set. This is a special case we will need to hard code and thus should make independent from the rest of our logic so it's easier to modify/remove. 
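Note on the "add tests for findTsConfigPath" TODO above: the helper introduced in the earlier patch is synchronous and just walks parent directories, so a small Jest spec alongside the other tests would cover it. The sketch below is hypothetical (not part of any patch in this series); it assumes the helper stays exported from `src/findTsConfigPath` with the `(startDir?: string) => string | null` signature shown above, and that no ancestor of the OS temp directory happens to contain a `tsconfig.json`.

```ts
// Hypothetical spec: packages/token-analyzer/src/__tests__/findTsConfigPath.test.ts
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import { findTsConfigPath } from '../findTsConfigPath';

describe('findTsConfigPath', () => {
  let tempDir: string;

  beforeAll(() => {
    // Build <tempDir>/tsconfig.json with a nested directory to start the walk from
    tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tsconfig-test-'));
    fs.mkdirSync(path.join(tempDir, 'nested', 'deep'), { recursive: true });
    fs.writeFileSync(path.join(tempDir, 'tsconfig.json'), '{}');
  });

  afterAll(() => {
    fs.rmSync(tempDir, { recursive: true, force: true });
  });

  it('returns the nearest tsconfig.json when walking up from a nested directory', () => {
    const result = findTsConfigPath(path.join(tempDir, 'nested', 'deep'));
    expect(result).toBe(path.join(tempDir, 'tsconfig.json'));
  });

  it('returns null when no tsconfig.json exists above the start directory', () => {
    // Assumes no ancestor of the temp directory ships a tsconfig.json
    const isolated = fs.mkdtempSync(path.join(os.tmpdir(), 'no-tsconfig-'));
    expect(findTsConfigPath(isolated)).toBeNull();
  });
});
```
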
From f61813d5d94badbc9fe7805e2082c391dc9813e4 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 3 Apr 2025 17:47:34 -0700 Subject: [PATCH 10/75] updating readme save button analysis for comparison --- packages/token-analyzer/README.md | 3 +- .../token-analyzer/src/__tests__/e2e.test.ts | 2 +- .../src/__tests__/test-files/analysis.json | 881 ++++++++++++++++++ 3 files changed, 884 insertions(+), 2 deletions(-) create mode 100644 packages/token-analyzer/src/__tests__/test-files/analysis.json diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index 4883fb33f..11b8c71d4 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -7,11 +7,12 @@ A static analysis tool that scans your project's style files to track and analyz - we also need to ensure var analysis is done correctly after the refactor - Look at the path info again. Do we ever need it? - Convert token member within the analysis output to an array so we can hold multiple tokens. The order should be the order or priority. [0] being the highest pri with the last item in the array the least prioritized. -- Duplicate entries in useButtonStyles.styles.ts for useRootDisabledStyles.base.nested:hover.color - we might need to test case this. This turns out to not be a dupe but a misplaced item. There's multiple layers of nesting in this particular style and instead of creating another nested layer it just injects the token into the direct tokens list. We should create another layer of nested. +- Duplicate entries in useButtonStyles.styles.ts for useRootDisabledStyles.base.nested:hover.color - we might need to test case this. This turns out to not be a dupe but a misplaced item. There's multiple layers of nesting in this particular style and instead of creating another nested layer it just injects the token into the direct tokens list. We should create another layer of nested and this should be able to be done if we update the path info correctly. - Add makeResetStyles specific tests in analyzer to ensure we process those correctly. - add config to point to custom prettier config for file output. - run shorthands functions with placeholder functions and parse out which properties are actually being set. This is a special case we will need to hard code and thus should make independent from the rest of our logic so it's easier to modify/remove. - add tests for findTsConfigPath +- add tests for structure and output. We're processing the styles but not putting them in the right places right now. 
## Features diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index 50de62be0..85cd5a631 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -21,7 +21,7 @@ describe('e2e test', () => { afterAll(async () => { // Clean up temp files - await fs.rm(targetPath, { recursive: true, force: true }); + // await fs.rm(targetPath, { recursive: true, force: true }); }); test('analyze test button styles', async () => { await analyzeProjectStyles(tempDir, targetPath); diff --git a/packages/token-analyzer/src/__tests__/test-files/analysis.json b/packages/token-analyzer/src/__tests__/test-files/analysis.json new file mode 100644 index 000000000..be3ded14d --- /dev/null +++ b/packages/token-analyzer/src/__tests__/test-files/analysis.json @@ -0,0 +1,881 @@ +{ + "useButtonStyles.styles.ts": { + "styles": { + "useRootBaseClassName": { + "resetStyles": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorNeutralBackground1", + "path": ["backgroundColor"] + }, + { + "property": "color", + "token": "tokens.colorNeutralForeground1", + "path": ["color"] + }, + { + "property": "border", + "token": "tokens.strokeWidthThin", + "path": ["border"] + }, + { + "property": "border", + "token": "tokens.colorNeutralStroke1", + "path": ["border"] + }, + { + "property": "fontFamily", + "token": "tokens.fontFamilyBase", + "path": ["fontFamily"] + }, + { + "property": "padding", + "token": "tokens.spacingHorizontalM", + "path": ["padding"] + }, + { + "property": "borderRadius", + "token": "tokens.borderRadiusMedium", + "path": ["borderRadius"] + }, + { + "property": "fontSize", + "token": "tokens.fontSizeBase300", + "path": ["fontSize"] + }, + { + "property": "fontWeight", + "token": "tokens.fontWeightSemibold", + "path": ["fontWeight"] + }, + { + "property": "lineHeight", + "token": "tokens.lineHeightBase300", + "path": ["lineHeight"] + }, + { + "property": "transitionDuration", + "token": "tokens.durationFaster", + "path": ["transitionDuration"] + }, + { + "property": "transitionTimingFunction", + "token": "tokens.curveEasyEase", + "path": ["transitionTimingFunction"] + } + ], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorNeutralBackground1Hover", + "path": [] + }, + { + "property": "borderColor", + "token": "tokens.colorNeutralStroke1Hover", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForeground1Hover", + "path": [] + } + ] + }, + "':hover:active'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorNeutralBackground1Pressed", + "path": [] + }, + { + "property": "borderColor", + "token": "tokens.colorNeutralStroke1Pressed", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForeground1Pressed", + "path": [] + } + ] + }, + "'@supports (-moz-appearance:button)'": { + "tokens": [ + { + "property": "boxShadow", + "token": "tokens.colorStrokeFocus2", + "path": [] + } + ] + } + }, + "isResetStyles": true, + "assignedVariables": ["rootBaseClassName"] + } + }, + "useIconBaseClassName": { + "resetStyles": { + "tokens": [ + { + "property": "[iconSpacingVar]", + "token": "tokens.spacingHorizontalSNudge", + "path": ["[iconSpacingVar]"] + } + ], + "nested": {}, + "isResetStyles": true, + "assignedVariables": ["iconBaseClassName"] + } + }, + "useRootStyles": { + "outline": { + "tokens": [ + { + "property": "backgroundColor", + "token": 
"tokens.colorTransparentBackground", + "path": ["backgroundColor"] + } + ], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackgroundHover", + "path": [] + } + ] + }, + "':hover:active'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackgroundPressed", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootStyles"] + }, + "primary": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorBrandBackground", + "path": ["backgroundColor"] + }, + { + "property": "color", + "token": "tokens.colorNeutralForegroundOnBrand", + "path": ["color"] + } + ], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorBrandBackgroundHover", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForegroundOnBrand", + "path": [] + } + ] + }, + "':hover:active'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorBrandBackgroundPressed", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForegroundOnBrand", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootStyles"] + }, + "subtle": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorSubtleBackground", + "path": ["backgroundColor"] + }, + { + "property": "color", + "token": "tokens.colorNeutralForeground2", + "path": ["color"] + } + ], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorSubtleBackgroundHover", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForeground2Hover", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForeground2BrandHover", + "path": [] + } + ] + }, + "':hover:active'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorSubtleBackgroundPressed", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForeground2Pressed", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForeground2BrandPressed", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootStyles"] + }, + "transparent": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": ["backgroundColor"] + }, + { + "property": "color", + "token": "tokens.colorNeutralForeground2", + "path": ["color"] + } + ], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackgroundHover", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForeground2BrandHover", + "path": [] + } + ] + }, + "':hover:active'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackgroundPressed", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForeground2BrandPressed", + "path": [] + } + ] + }, + "'@media (forced-colors: active)'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": [] + }, + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootStyles"] + }, + "circular": { + "tokens": [ + { + "property": "borderRadius", + "token": "tokens.borderRadiusCircular", + "path": ["borderRadius"] + } + ], + "assignedVariables": ["rootStyles"] + }, + "square": { + "tokens": [ + { + "property": "borderRadius", + 
"token": "tokens.borderRadiusNone", + "path": ["borderRadius"] + } + ], + "assignedVariables": ["rootStyles"] + }, + "small": { + "tokens": [ + { + "property": "padding", + "token": "tokens.spacingHorizontalS", + "path": ["padding"] + }, + { + "property": "borderRadius", + "token": "tokens.borderRadiusMedium", + "path": ["borderRadius"] + }, + { + "property": "fontSize", + "token": "tokens.fontSizeBase200", + "path": ["fontSize"] + }, + { + "property": "fontWeight", + "token": "tokens.fontWeightRegular", + "path": ["fontWeight"] + }, + { + "property": "lineHeight", + "token": "tokens.lineHeightBase200", + "path": ["lineHeight"] + } + ], + "assignedVariables": ["rootStyles"] + }, + "large": { + "tokens": [ + { + "property": "padding", + "token": "tokens.spacingHorizontalL", + "path": ["padding"] + }, + { + "property": "borderRadius", + "token": "tokens.borderRadiusMedium", + "path": ["borderRadius"] + }, + { + "property": "fontSize", + "token": "tokens.fontSizeBase400", + "path": ["fontSize"] + }, + { + "property": "fontWeight", + "token": "tokens.fontWeightSemibold", + "path": ["fontWeight"] + }, + { + "property": "lineHeight", + "token": "tokens.lineHeightBase400", + "path": ["lineHeight"] + } + ], + "assignedVariables": ["rootStyles"] + } + }, + "useRootDisabledStyles": { + "base": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorNeutralBackgroundDisabled", + "path": ["backgroundColor"] + }, + { + "property": "borderTopColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": ["borderTopColor"] + }, + { + "property": "borderRightColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": ["borderRightColor"] + }, + { + "property": "borderBottomColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": ["borderBottomColor"] + }, + { + "property": "borderLeftColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": ["borderLeftColor"] + }, + { + "property": "color", + "token": "tokens.colorNeutralForegroundDisabled", + "path": ["color"] + } + ], + "nested": { + "[`& .${buttonClassNames.icon}`]": { + "tokens": [ + { + "property": "color", + "token": "tokens.colorNeutralForegroundDisabled", + "path": [] + } + ] + }, + "':hover'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorNeutralBackgroundDisabled", + "path": [] + }, + { + "property": "borderTopColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": [] + }, + { + "property": "borderRightColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": [] + }, + { + "property": "borderBottomColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": [] + }, + { + "property": "borderLeftColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForegroundDisabled", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForegroundDisabled", + "path": [] + } + ] + }, + "':hover:active'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorNeutralBackgroundDisabled", + "path": [] + }, + { + "property": "borderTopColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": [] + }, + { + "property": "borderRightColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": [] + }, + { + "property": "borderBottomColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": [] + }, + { + "property": "borderLeftColor", + "token": "tokens.colorNeutralStrokeDisabled", + "path": [] + }, + { + "property": "color", + 
"token": "tokens.colorNeutralForegroundDisabled", + "path": [] + }, + { + "property": "color", + "token": "tokens.colorNeutralForegroundDisabled", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootDisabledStyles"] + }, + "outline": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": ["backgroundColor"] + } + ], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": [] + } + ] + }, + "':hover:active'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootDisabledStyles"] + }, + "subtle": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": ["backgroundColor"] + } + ], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": [] + } + ] + }, + "':hover:active'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootDisabledStyles"] + }, + "transparent": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": ["backgroundColor"] + } + ], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": [] + } + ] + }, + "':hover:active'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootDisabledStyles"] + } + }, + "useRootFocusStyles": { + "circular": { + "tokens": [], + "nested": { + ":focus": { + "tokens": [ + { + "property": "borderRadius", + "token": "tokens.borderRadiusCircular", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootFocusStyles"] + }, + "square": { + "tokens": [], + "nested": { + ":focus": { + "tokens": [ + { + "property": "borderRadius", + "token": "tokens.borderRadiusNone", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootFocusStyles"] + }, + "primary": { + "tokens": [], + "nested": { + ":focus": { + "tokens": [ + { + "property": "boxShadow", + "token": "tokens.shadow2", + "path": [] + }, + { + "property": "boxShadow", + "token": "tokens.strokeWidthThin", + "path": [] + }, + { + "property": "boxShadow", + "token": "tokens.colorStrokeFocus2", + "path": [] + }, + { + "property": "boxShadow", + "token": "tokens.strokeWidthThick", + "path": [] + }, + { + "property": "boxShadow", + "token": "tokens.colorNeutralForegroundOnBrand", + "path": [] + }, + { + "property": "boxShadow", + "token": "tokens.shadow2", + "path": [] + }, + { + "property": "boxShadow", + "token": "tokens.strokeWidthThin", + "path": [] + }, + { + "property": "boxShadow", + "token": "tokens.colorStrokeFocus2", + "path": [] + }, + { + "property": "borderTopColor", + "token": "tokens.colorStrokeFocus2", + "path": [] + }, + { + "property": "borderRightColor", + "token": "tokens.colorStrokeFocus2", + "path": [] + }, + { + "property": "borderBottomColor", + "token": "tokens.colorStrokeFocus2", + "path": [] + }, + { + "property": "borderLeftColor", + "token": "tokens.colorStrokeFocus2", + "path": [] + } + ] + }, + "'@supports (-moz-appearance:button)'": { + "tokens": [ + { + "property": "boxShadow", + "token": "tokens.shadow2", + "path": [] + }, + { + "property": 
"boxShadow", + "token": "tokens.colorStrokeFocus2", + "path": [] + }, + { + "property": "boxShadow", + "token": "tokens.strokeWidthThick", + "path": [] + }, + { + "property": "boxShadow", + "token": "tokens.colorNeutralForegroundOnBrand", + "path": [] + }, + { + "property": "boxShadow", + "token": "tokens.shadow2", + "path": [] + }, + { + "property": "boxShadow", + "token": "tokens.colorStrokeFocus2", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootFocusStyles"] + }, + "small": { + "tokens": [], + "nested": { + ":focus": { + "tokens": [ + { + "property": "borderRadius", + "token": "tokens.borderRadiusSmall", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootFocusStyles"] + }, + "large": { + "tokens": [], + "nested": { + ":focus": { + "tokens": [ + { + "property": "borderRadius", + "token": "tokens.borderRadiusLarge", + "path": [] + } + ] + } + }, + "assignedVariables": ["rootFocusStyles"] + } + }, + "useRootIconOnlyStyles": {}, + "useIconStyles": { + "small": { + "tokens": [ + { + "property": "[iconSpacingVar]", + "token": "tokens.spacingHorizontalXS", + "path": ["[iconSpacingVar]"] + } + ], + "assignedVariables": ["iconStyles"] + }, + "large": { + "tokens": [ + { + "property": "[iconSpacingVar]", + "token": "tokens.spacingHorizontalSNudge", + "path": ["[iconSpacingVar]"] + } + ], + "assignedVariables": ["iconStyles"] + } + } + }, + "metadata": { + "styleConditions": { + "buttonClassNames.root": { + "isBase": true, + "slotName": "root" + }, + "rootBaseClassName": { + "isBase": true, + "slotName": "root" + }, + "rootStyles[size]": { + "isBase": true, + "slotName": "root" + }, + "rootStyles[shape]": { + "isBase": true, + "slotName": "root" + }, + "rootFocusStyles[size]": { + "isBase": true, + "slotName": "root" + }, + "rootFocusStyles[shape]": { + "isBase": true, + "slotName": "root" + }, + "state.root.className": { + "isBase": true, + "slotName": "root" + }, + "rootStyles.smallWithIcon": { + "conditions": ["icon && size === 'small'"], + "slotName": "root" + }, + "rootStyles.largeWithIcon": { + "conditions": ["icon && size === 'large'"], + "slotName": "root" + }, + "rootDisabledStyles.base": { + "conditions": ["(disabled || disabledFocusable)"], + "slotName": "root" + }, + "rootDisabledStyles.highContrast": { + "conditions": ["(disabled || disabledFocusable)"], + "slotName": "root" + }, + "rootFocusStyles.primary": { + "conditions": ["appearance === 'primary'"], + "slotName": "root" + }, + "buttonClassNames.icon": { + "isBase": true, + "slotName": "icon" + }, + "iconBaseClassName": { + "isBase": true, + "slotName": "icon" + }, + "iconStyles[size]": { + "isBase": true, + "slotName": "icon" + }, + "state.icon.className": { + "isBase": true, + "slotName": "icon" + } + } + } + } +} From cea1019b8cc2fdd9aa8e5f3d05876f144c4e2dca Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 3 Apr 2025 17:54:13 -0700 Subject: [PATCH 11/75] tracing root of issue --- packages/token-analyzer/src/astAnalyzer.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index 97ba2b9e6..0abaa335a 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -402,6 +402,9 @@ function createStyleContent(tokens: TokenReference[]): StyleContent { const nestedTokens = tokens.filter((t) => t.path.length > 1); if (nestedTokens.length > 0) { content.nested = nestedTokens.reduce((acc, token) => { + if (token.path.includes('[`& .${buttonClassNames.icon}`]')) { + 
console.log(token.path); + } const nestedKey = token.path[0]; if (!acc[nestedKey]) { From 771deecbd4b955ae1af754e292a8ab6ef1921d99 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 4 Apr 2025 14:26:07 -0700 Subject: [PATCH 12/75] fixed recursive nesting issue for correct data structure --- packages/token-analyzer/src/astAnalyzer.ts | 52 +++++++++++++++++----- 1 file changed, 42 insertions(+), 10 deletions(-) diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index 0abaa335a..a1b2bf9fd 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -401,23 +401,55 @@ function createStyleContent(tokens: TokenReference[]): StyleContent { // Nested structures have paths longer than 1 const nestedTokens = tokens.filter((t) => t.path.length > 1); if (nestedTokens.length > 0) { - content.nested = nestedTokens.reduce((acc, token) => { + const acc: StyleTokens = {}; + + /** + * Recursive function to create a nested structure for tokens + * This function will create a nested object structure based on the token path. + * @param token + * @param pathIndex where in the path we are, this allows us to preserve the path while recursing through it + * @param currentLevel the current level of the nested structure we're working on + */ + const createNestedStructure = ( + token: TokenReference, + pathIndex: number, + currentLevel: StyleTokens + ) => { if (token.path.includes('[`& .${buttonClassNames.icon}`]')) { console.log(token.path); } - const nestedKey = token.path[0]; + const nestedKey = token.path[pathIndex]; - if (!acc[nestedKey]) { - acc[nestedKey] = { tokens: [] }; + // if no token array exists, create one + if (!currentLevel[nestedKey]) { + currentLevel[nestedKey] = { tokens: [] }; } - acc[nestedKey].tokens.push({ - ...token, - path: [], // Reset path as we've used it for nesting - }); + // if we have a path length that is greater than our current index minus 1, we need to recurse + // this is because if we have more than a single item in our path left there's another level + if (token.path.length - 1 - pathIndex > 1) { + // Create a nested structure through a recursive call + if (!currentLevel[nestedKey].nested) { + currentLevel[nestedKey].nested = {}; + } + createNestedStructure( + token, + pathIndex + 1, + currentLevel[nestedKey].nested + ); + } else { + currentLevel[nestedKey].tokens.push({ + ...token, + path: token.path, + }); + } + }; + + nestedTokens.forEach((token) => { + createNestedStructure(token, 0, acc); + }); - return acc; - }, {}); + content.nested = acc; } return content; From 1cdf978bdb87a569245385c564d67305e8f24284 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 4 Apr 2025 16:16:12 -0700 Subject: [PATCH 13/75] updated analysis file with more complete path data --- .../src/__tests__/test-files/analysis.json | 367 ++++++++++-------- 1 file changed, 211 insertions(+), 156 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/test-files/analysis.json b/packages/token-analyzer/src/__tests__/test-files/analysis.json index be3ded14d..0309ed607 100644 --- a/packages/token-analyzer/src/__tests__/test-files/analysis.json +++ b/packages/token-analyzer/src/__tests__/test-files/analysis.json @@ -71,17 +71,17 @@ { "property": "backgroundColor", "token": "tokens.colorNeutralBackground1Hover", - "path": [] + "path": ["':hover'", "backgroundColor"] }, { "property": "borderColor", "token": "tokens.colorNeutralStroke1Hover", - "path": [] + "path": ["':hover'", "borderColor"] }, { 
"property": "color", "token": "tokens.colorNeutralForeground1Hover", - "path": [] + "path": ["':hover'", "color"] } ] }, @@ -90,28 +90,33 @@ { "property": "backgroundColor", "token": "tokens.colorNeutralBackground1Pressed", - "path": [] + "path": ["':hover:active'", "backgroundColor"] }, { "property": "borderColor", "token": "tokens.colorNeutralStroke1Pressed", - "path": [] + "path": ["':hover:active'", "borderColor"] }, { "property": "color", "token": "tokens.colorNeutralForeground1Pressed", - "path": [] + "path": ["':hover:active'", "color"] } ] }, "'@supports (-moz-appearance:button)'": { - "tokens": [ - { - "property": "boxShadow", - "token": "tokens.colorStrokeFocus2", - "path": [] + "tokens": [], + "nested": { + ":focus": { + "tokens": [ + { + "property": "boxShadow", + "token": "tokens.colorStrokeFocus2", + "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] + } + ] } - ] + } } }, "isResetStyles": true, @@ -147,7 +152,7 @@ { "property": "backgroundColor", "token": "tokens.colorTransparentBackgroundHover", - "path": [] + "path": ["':hover'", "backgroundColor"] } ] }, @@ -156,7 +161,7 @@ { "property": "backgroundColor", "token": "tokens.colorTransparentBackgroundPressed", - "path": [] + "path": ["':hover:active'", "backgroundColor"] } ] } @@ -182,12 +187,12 @@ { "property": "backgroundColor", "token": "tokens.colorBrandBackgroundHover", - "path": [] + "path": ["':hover'", "backgroundColor"] }, { "property": "color", "token": "tokens.colorNeutralForegroundOnBrand", - "path": [] + "path": ["':hover'", "color"] } ] }, @@ -196,12 +201,12 @@ { "property": "backgroundColor", "token": "tokens.colorBrandBackgroundPressed", - "path": [] + "path": ["':hover:active'", "backgroundColor"] }, { "property": "color", "token": "tokens.colorNeutralForegroundOnBrand", - "path": [] + "path": ["':hover:active'", "color"] } ] } @@ -227,38 +232,50 @@ { "property": "backgroundColor", "token": "tokens.colorSubtleBackgroundHover", - "path": [] + "path": ["':hover'", "backgroundColor"] }, { "property": "color", "token": "tokens.colorNeutralForeground2Hover", - "path": [] - }, - { - "property": "color", - "token": "tokens.colorNeutralForeground2BrandHover", - "path": [] + "path": ["':hover'", "color"] } - ] + ], + "nested": { + "[`& .${buttonClassNames.icon}`]": { + "tokens": [ + { + "property": "color", + "token": "tokens.colorNeutralForeground2BrandHover", + "path": ["':hover'", "[`& .${buttonClassNames.icon}`]", "color"] + } + ] + } + } }, "':hover:active'": { "tokens": [ { "property": "backgroundColor", "token": "tokens.colorSubtleBackgroundPressed", - "path": [] + "path": ["':hover:active'", "backgroundColor"] }, { "property": "color", "token": "tokens.colorNeutralForeground2Pressed", - "path": [] - }, - { - "property": "color", - "token": "tokens.colorNeutralForeground2BrandPressed", - "path": [] + "path": ["':hover:active'", "color"] } - ] + ], + "nested": { + "[`& .${buttonClassNames.icon}`]": { + "tokens": [ + { + "property": "color", + "token": "tokens.colorNeutralForeground2BrandPressed", + "path": ["':hover:active'", "[`& .${buttonClassNames.icon}`]", "color"] + } + ] + } + } } }, "assignedVariables": ["rootStyles"] @@ -282,12 +299,12 @@ { "property": "backgroundColor", "token": "tokens.colorTransparentBackgroundHover", - "path": [] + "path": ["':hover'", "backgroundColor"] }, { "property": "color", "token": "tokens.colorNeutralForeground2BrandHover", - "path": [] + "path": ["':hover'", "color"] } ] }, @@ -296,28 +313,37 @@ { "property": "backgroundColor", "token": 
"tokens.colorTransparentBackgroundPressed", - "path": [] + "path": ["':hover:active'", "backgroundColor"] }, { "property": "color", "token": "tokens.colorNeutralForeground2BrandPressed", - "path": [] + "path": ["':hover:active'", "color"] } ] }, "'@media (forced-colors: active)'": { - "tokens": [ - { - "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", - "path": [] - }, - { - "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", - "path": [] + "tokens": [], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": ["'@media (forced-colors: active)'", "':hover'", "backgroundColor"] + } + ] + }, + "':hover:active'": { + "tokens": [ + { + "property": "backgroundColor", + "token": "tokens.colorTransparentBackground", + "path": ["'@media (forced-colors: active)'", "':hover:active'", "backgroundColor"] + } + ] } - ] + } } }, "assignedVariables": ["rootStyles"] @@ -443,7 +469,7 @@ { "property": "color", "token": "tokens.colorNeutralForegroundDisabled", - "path": [] + "path": ["[`& .${buttonClassNames.icon}`]", "color"] } ] }, @@ -452,78 +478,90 @@ { "property": "backgroundColor", "token": "tokens.colorNeutralBackgroundDisabled", - "path": [] + "path": ["':hover'", "backgroundColor"] }, { "property": "borderTopColor", "token": "tokens.colorNeutralStrokeDisabled", - "path": [] + "path": ["':hover'", "borderTopColor"] }, { "property": "borderRightColor", "token": "tokens.colorNeutralStrokeDisabled", - "path": [] + "path": ["':hover'", "borderRightColor"] }, { "property": "borderBottomColor", "token": "tokens.colorNeutralStrokeDisabled", - "path": [] + "path": ["':hover'", "borderBottomColor"] }, { "property": "borderLeftColor", "token": "tokens.colorNeutralStrokeDisabled", - "path": [] - }, - { - "property": "color", - "token": "tokens.colorNeutralForegroundDisabled", - "path": [] + "path": ["':hover'", "borderLeftColor"] }, { "property": "color", "token": "tokens.colorNeutralForegroundDisabled", - "path": [] + "path": ["':hover'", "color"] } - ] + ], + "nested": { + "[`& .${buttonClassNames.icon}`]": { + "tokens": [ + { + "property": "color", + "token": "tokens.colorNeutralForegroundDisabled", + "path": ["':hover'", "[`& .${buttonClassNames.icon}`]", "color"] + } + ] + } + } }, "':hover:active'": { "tokens": [ { "property": "backgroundColor", "token": "tokens.colorNeutralBackgroundDisabled", - "path": [] + "path": ["':hover:active'", "backgroundColor"] }, { "property": "borderTopColor", "token": "tokens.colorNeutralStrokeDisabled", - "path": [] + "path": ["':hover:active'", "borderTopColor"] }, { "property": "borderRightColor", "token": "tokens.colorNeutralStrokeDisabled", - "path": [] + "path": ["':hover:active'", "borderRightColor"] }, { "property": "borderBottomColor", "token": "tokens.colorNeutralStrokeDisabled", - "path": [] + "path": ["':hover:active'", "borderBottomColor"] }, { "property": "borderLeftColor", "token": "tokens.colorNeutralStrokeDisabled", - "path": [] - }, - { - "property": "color", - "token": "tokens.colorNeutralForegroundDisabled", - "path": [] + "path": ["':hover:active'", "borderLeftColor"] }, { "property": "color", "token": "tokens.colorNeutralForegroundDisabled", - "path": [] + "path": ["':hover:active'", "color"] } - ] + ], + "nested": { + "[`& .${buttonClassNames.icon}`]": { + "tokens": [ + { + "property": "color", + "token": "tokens.colorNeutralForegroundDisabled", + "path": ["':hover:active'", "[`& .${buttonClassNames.icon}`]", 
"color"] + } + ] + } + } } }, "assignedVariables": ["rootDisabledStyles"] @@ -542,7 +580,7 @@ { "property": "backgroundColor", "token": "tokens.colorTransparentBackground", - "path": [] + "path": ["':hover'", "backgroundColor"] } ] }, @@ -551,7 +589,7 @@ { "property": "backgroundColor", "token": "tokens.colorTransparentBackground", - "path": [] + "path": ["':hover:active'", "backgroundColor"] } ] } @@ -572,7 +610,7 @@ { "property": "backgroundColor", "token": "tokens.colorTransparentBackground", - "path": [] + "path": ["':hover'", "backgroundColor"] } ] }, @@ -581,7 +619,7 @@ { "property": "backgroundColor", "token": "tokens.colorTransparentBackground", - "path": [] + "path": ["':hover:active'", "backgroundColor"] } ] } @@ -602,7 +640,7 @@ { "property": "backgroundColor", "token": "tokens.colorTransparentBackground", - "path": [] + "path": ["':hover'", "backgroundColor"] } ] }, @@ -611,7 +649,7 @@ { "property": "backgroundColor", "token": "tokens.colorTransparentBackground", - "path": [] + "path": ["':hover:active'", "backgroundColor"] } ] } @@ -628,7 +666,7 @@ { "property": "borderRadius", "token": "tokens.borderRadiusCircular", - "path": [] + "path": [":focus", "borderRadius"] } ] } @@ -643,7 +681,7 @@ { "property": "borderRadius", "token": "tokens.borderRadiusNone", - "path": [] + "path": [":focus", "borderRadius"] } ] } @@ -658,98 +696,115 @@ { "property": "boxShadow", "token": "tokens.shadow2", - "path": [] + "path": [":focus", "boxShadow"] }, { "property": "boxShadow", "token": "tokens.strokeWidthThin", - "path": [] + "path": [":focus", "boxShadow"] }, { "property": "boxShadow", "token": "tokens.colorStrokeFocus2", - "path": [] + "path": [":focus", "boxShadow"] }, { "property": "boxShadow", "token": "tokens.strokeWidthThick", - "path": [] + "path": [":focus", "boxShadow"] }, { "property": "boxShadow", "token": "tokens.colorNeutralForegroundOnBrand", - "path": [] - }, - { - "property": "boxShadow", - "token": "tokens.shadow2", - "path": [] - }, - { - "property": "boxShadow", - "token": "tokens.strokeWidthThin", - "path": [] - }, - { - "property": "boxShadow", - "token": "tokens.colorStrokeFocus2", - "path": [] - }, - { - "property": "borderTopColor", - "token": "tokens.colorStrokeFocus2", - "path": [] - }, - { - "property": "borderRightColor", - "token": "tokens.colorStrokeFocus2", - "path": [] - }, - { - "property": "borderBottomColor", - "token": "tokens.colorStrokeFocus2", - "path": [] - }, - { - "property": "borderLeftColor", - "token": "tokens.colorStrokeFocus2", - "path": [] + "path": [":focus", "boxShadow"] } - ] + ], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "boxShadow", + "token": "tokens.shadow2", + "path": [":focus", "':hover'", "boxShadow"] + }, + { + "property": "boxShadow", + "token": "tokens.strokeWidthThin", + "path": [":focus", "':hover'", "boxShadow"] + }, + { + "property": "boxShadow", + "token": "tokens.colorStrokeFocus2", + "path": [":focus", "':hover'", "boxShadow"] + }, + { + "property": "borderTopColor", + "token": "tokens.colorStrokeFocus2", + "path": [":focus", "':hover'", "borderTopColor"] + }, + { + "property": "borderRightColor", + "token": "tokens.colorStrokeFocus2", + "path": [":focus", "':hover'", "borderRightColor"] + }, + { + "property": "borderBottomColor", + "token": "tokens.colorStrokeFocus2", + "path": [":focus", "':hover'", "borderBottomColor"] + }, + { + "property": "borderLeftColor", + "token": "tokens.colorStrokeFocus2", + "path": [":focus", "':hover'", "borderLeftColor"] + } + ] + } + } }, "'@supports 
(-moz-appearance:button)'": { - "tokens": [ - { - "property": "boxShadow", - "token": "tokens.shadow2", - "path": [] - }, - { - "property": "boxShadow", - "token": "tokens.colorStrokeFocus2", - "path": [] - }, - { - "property": "boxShadow", - "token": "tokens.strokeWidthThick", - "path": [] - }, - { - "property": "boxShadow", - "token": "tokens.colorNeutralForegroundOnBrand", - "path": [] - }, - { - "property": "boxShadow", - "token": "tokens.shadow2", - "path": [] - }, - { - "property": "boxShadow", - "token": "tokens.colorStrokeFocus2", - "path": [] + "tokens": [], + "nested": { + ":focus": { + "tokens": [ + { + "property": "boxShadow", + "token": "tokens.shadow2", + "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] + }, + { + "property": "boxShadow", + "token": "tokens.colorStrokeFocus2", + "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] + }, + { + "property": "boxShadow", + "token": "tokens.strokeWidthThick", + "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] + }, + { + "property": "boxShadow", + "token": "tokens.colorNeutralForegroundOnBrand", + "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] + } + ], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "boxShadow", + "token": "tokens.shadow2", + "path": ["'@supports (-moz-appearance:button)'", ":focus", "':hover'", "boxShadow"] + }, + { + "property": "boxShadow", + "token": "tokens.colorStrokeFocus2", + "path": ["'@supports (-moz-appearance:button)'", ":focus", "':hover'", "boxShadow"] + } + ] + } + } } - ] + } } }, "assignedVariables": ["rootFocusStyles"] @@ -762,7 +817,7 @@ { "property": "borderRadius", "token": "tokens.borderRadiusSmall", - "path": [] + "path": [":focus", "borderRadius"] } ] } @@ -777,7 +832,7 @@ { "property": "borderRadius", "token": "tokens.borderRadiusLarge", - "path": [] + "path": [":focus", "borderRadius"] } ] } From 8f23bedd849f97b234e45525e31cf66c853fe99b Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 4 Apr 2025 16:48:11 -0700 Subject: [PATCH 14/75] exclude our test-files from dep checks --- packages/token-analyzer/eslint.config.js | 29 +++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/packages/token-analyzer/eslint.config.js b/packages/token-analyzer/eslint.config.js index 2be3ed1ba..a40a07939 100644 --- a/packages/token-analyzer/eslint.config.js +++ b/packages/token-analyzer/eslint.config.js @@ -1,7 +1,34 @@ const baseConfig = require('../../eslint.config.js'); +const newBaseConfig = baseConfig.map((config) => { + // Find the specific configuration entry that contains the @nx/dependency-checks rule + if (config.rules?.['@nx/dependency-checks']) { + // Create a new config object with the extended ignoredFiles array + return { + ...config, + rules: { + ...config.rules, + '@nx/dependency-checks': [ + config.rules['@nx/dependency-checks'][0], + { + ...config.rules['@nx/dependency-checks'][1], + ignoredFiles: [ + ...config.rules['@nx/dependency-checks'][1].ignoredFiles, + // Exclude our test files from the dep checks + '{projectRoot}/**/__tests__/test-files/**', + ], + }, + ], + }, + }; + } + + // Return other config entries unchanged + return config; +}); + module.exports = [ - ...baseConfig, + ...newBaseConfig, { files: ['**/*.ts', '**/*.tsx'], // Override or add rules here From b9a8943dd0054e0bea1fcc749d5b77e4018d190f Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 4 Apr 2025 22:50:50 -0700 Subject: [PATCH 15/75] updated todo --- 
packages/token-analyzer/README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index 11b8c71d4..8f7ad9292 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -1,18 +1,18 @@ # Design Token Usage Analyzer -A static analysis tool that scans your project's style files to track and analyze design token usage. The analyzer helps identify where and how design tokens are being used across your codebase, making it easier to maintain consistency and track token adoption. +A static analysis tool that scans your project's style files to track and analyze design token usage. The analyzer helps identify where and how design tokens are being used across your codebase, making it easier to maintain consistency and track token adoption. The data from this tool can also be used to create other tools like theme designers. ## TODO - we also need to ensure var analysis is done correctly after the refactor -- Look at the path info again. Do we ever need it? - Convert token member within the analysis output to an array so we can hold multiple tokens. The order should be the order or priority. [0] being the highest pri with the last item in the array the least prioritized. -- Duplicate entries in useButtonStyles.styles.ts for useRootDisabledStyles.base.nested:hover.color - we might need to test case this. This turns out to not be a dupe but a misplaced item. There's multiple layers of nesting in this particular style and instead of creating another nested layer it just injects the token into the direct tokens list. We should create another layer of nested and this should be able to be done if we update the path info correctly. + - Add makeResetStyles specific tests in analyzer to ensure we process those correctly. - add config to point to custom prettier config for file output. - run shorthands functions with placeholder functions and parse out which properties are actually being set. This is a special case we will need to hard code and thus should make independent from the rest of our logic so it's easier to modify/remove. - add tests for findTsConfigPath -- add tests for structure and output. We're processing the styles but not putting them in the right places right now. +- add tests for structure and output. 
~~We're processing the styles but not putting them in the right places right now~~ +- primary styles using createCustomFocusIndicatorStyle with a shorthand as the first arg seems to get missed but the nested styles we catch (investigate) I think this just means we need to ensure we process shorthands here as well ## Features @@ -26,7 +26,7 @@ A static analysis tool that scans your project's style files to track and analyz ## Installation ```bash -npm install --save-dev @your-org/token-analyzer +npm install --save-dev @fluentui-contrib/token-analyzer ``` ## Usage From c163aaa4278e875514ab0b629586b8d8ca206d34 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 8 Apr 2025 01:40:53 -0700 Subject: [PATCH 16/75] Fixing issue with nested spread calls within call expressions --- packages/token-analyzer/README.md | 2 +- .../src/__tests__/test-files/analysis.json | 20 +++++++++++++++++++ packages/token-analyzer/src/astAnalyzer.ts | 6 +++--- 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index 8f7ad9292..e536f8121 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -12,7 +12,7 @@ A static analysis tool that scans your project's style files to track and analyz - run shorthands functions with placeholder functions and parse out which properties are actually being set. This is a special case we will need to hard code and thus should make independent from the rest of our logic so it's easier to modify/remove. - add tests for findTsConfigPath - add tests for structure and output. ~~We're processing the styles but not putting them in the right places right now~~ -- primary styles using createCustomFocusIndicatorStyle with a shorthand as the first arg seems to get missed but the nested styles we catch (investigate) I think this just means we need to ensure we process shorthands here as well +- update contributing doc with info about version management ## Features diff --git a/packages/token-analyzer/src/__tests__/test-files/analysis.json b/packages/token-analyzer/src/__tests__/test-files/analysis.json index 0309ed607..2eb53ac82 100644 --- a/packages/token-analyzer/src/__tests__/test-files/analysis.json +++ b/packages/token-analyzer/src/__tests__/test-files/analysis.json @@ -693,6 +693,26 @@ "nested": { ":focus": { "tokens": [ + { + "property": "borderTopColor", + "token": "tokens.colorStrokeFocus2", + "path": [":focus", "borderTopColor"] + }, + { + "property": "borderRightColor", + "token": "tokens.colorStrokeFocus2", + "path": [":focus", "borderRightColor"] + }, + { + "property": "borderBottomColor", + "token": "tokens.colorStrokeFocus2", + "path": [":focus", "borderBottomColor"] + }, + { + "property": "borderLeftColor", + "token": "tokens.colorStrokeFocus2", + "path": [":focus", "borderLeftColor"] + }, { "property": "boxShadow", "token": "tokens.shadow2", diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index a1b2bf9fd..6a464bfa3 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -179,6 +179,9 @@ function processStyleProperty( nestedModifier, childName, ]); + } else if (Node.isSpreadAssignment(property)) { + // Handle spread elements in object literals within function arguments + processNode(property.getExpression(), [...path, nestedModifier]); } }); } @@ -415,9 +418,6 @@ function createStyleContent(tokens: TokenReference[]): StyleContent { pathIndex: number, 
currentLevel: StyleTokens ) => { - if (token.path.includes('[`& .${buttonClassNames.icon}`]')) { - console.log(token.path); - } const nestedKey = token.path[pathIndex]; // if no token array exists, create one From 26ba585e6fb88d99f06ac2f96f2b0a9993680684 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 8 Apr 2025 15:45:10 -0700 Subject: [PATCH 17/75] adding make reset styles tests --- .../token-analyzer/src/__tests__/e2e.test.ts | 44 +++++++++++++++++-- 1 file changed, 41 insertions(+), 3 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index 85cd5a631..a24013cbc 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -5,6 +5,8 @@ import { analyzeProjectStyles } from '../index.js'; describe('e2e test', () => { let tempDir: string; let targetPath: string; + let analysis: any; + let styles: any; beforeAll(async () => { // Create temp directory for test files @@ -17,13 +19,49 @@ describe('e2e test', () => { 'test-files', 'analysis.json' ); + + await analyzeProjectStyles(tempDir, targetPath); + await fs + .readFile(path.join(tempDir, 'analysis.json'), 'utf-8') + .then((analysisData) => { + // Parse the JSON data from our analysis and start validating it + analysis = JSON.parse(analysisData); + }); + + styles = analysis['useButtonStyles.styles.ts'].styles; }); afterAll(async () => { // Clean up temp files // await fs.rm(targetPath, { recursive: true, force: true }); }); - test('analyze test button styles', async () => { - await analyzeProjectStyles(tempDir, targetPath); - }, 10000); + + test('validate basic structure', () => { + // Validate the structure of the analysis object + expect(analysis).toHaveProperty(['useButtonStyles.styles.ts']); + + // Validate that we process a makeResetStyles function useRootBaseClassName + expect(styles).toHaveProperty('useRootBaseClassName'); + }); + + describe('validate makeResetStyles tokens', () => { + // Define token cases for makeResetStyles tests + const resetStyleTokenCases = [ + ['backgroundColor', 'tokens.colorNeutralBackground1Hover'], + ['borderColor', 'tokens.colorNeutralStroke1Hover'], + ['color', 'tokens.colorNeutralForeground1Hover'], + ]; + test.each(resetStyleTokenCases)( + '%s token is properly configured', + (propertyName, expectedToken) => { + const hoverMakeResetTokens = + styles.useRootBaseClassName.resetStyles.nested["':hover'"].tokens; + const token = hoverMakeResetTokens.find( + (t: any) => t.property === propertyName + ); + expect(token).toBeDefined(); + expect(token.token).toBe(expectedToken); + } + ); + }); }); From a37f71e02eb38435c38be669ee9a9e7917f7ff53 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 8 Apr 2025 15:54:03 -0700 Subject: [PATCH 18/75] makeStyles tests --- .../token-analyzer/src/__tests__/e2e.test.ts | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index a24013cbc..d8397ddd6 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -64,4 +64,20 @@ describe('e2e test', () => { } ); }); + + describe('validate makeStyles tokens', () => { + // Define token cases for makeResetStyles tests + const makeStylesStyles = [ + ['backgroundColor', 'tokens.colorTransparentBackground'], + ]; + test.each(makeStylesStyles)( + '%s token is properly configured', + (propertyName, expectedToken) => 
{ + const rootOutline = styles.useRootStyles.outline.tokens; + const token = rootOutline.find((t: any) => t.property === propertyName); + expect(token).toBeDefined(); + expect(token.token).toBe(expectedToken); + } + ); + }); }); From 1561e6e0397b14dfeca23f75faa00aa023cf4fa9 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 9 Apr 2025 15:37:18 -0700 Subject: [PATCH 19/75] fixing additional recursion bug --- packages/token-analyzer/src/astAnalyzer.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index 6a464bfa3..bf4186a4f 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -550,7 +550,10 @@ async function analyzeMakeStyles( if (Node.isObjectLiteralExpression(stylesArg)) { // Process the styles object stylesArg.getProperties().forEach((prop) => { - if (Node.isPropertyAssignment(prop)) { + if ( + Node.isPropertyAssignment(prop) || + Node.isSpreadAssignment(prop) + ) { const tokens = processStyleProperty(prop, importedValues, true); if (tokens.length) { const styleContent = createStyleContent(tokens); From 34d2230d70125ac89837a0fadbbbc9e0c1fcc931 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 9 Apr 2025 15:37:32 -0700 Subject: [PATCH 20/75] updating test with new data --- .../src/__tests__/analyzer.test.ts | 2 +- .../src/__tests__/test-files/analysis.json | 34 +++++++++++++++++++ 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/packages/token-analyzer/src/__tests__/analyzer.test.ts b/packages/token-analyzer/src/__tests__/analyzer.test.ts index 3a3af15d8..3b1d6227d 100644 --- a/packages/token-analyzer/src/__tests__/analyzer.test.ts +++ b/packages/token-analyzer/src/__tests__/analyzer.test.ts @@ -65,7 +65,7 @@ describe('Token Analyzer', () => { const focusStyle = styles.useStyles.focusIndicator.nested?.[':focus']; expect(focusStyle?.tokens[0]).toEqual({ - path: [], + path: [':focus', 'textDecorationColor'], property: 'textDecorationColor', token: 'tokens.colorStrokeFocus2', }); diff --git a/packages/token-analyzer/src/__tests__/test-files/analysis.json b/packages/token-analyzer/src/__tests__/test-files/analysis.json index 2eb53ac82..40eb93cad 100644 --- a/packages/token-analyzer/src/__tests__/test-files/analysis.json +++ b/packages/token-analyzer/src/__tests__/test-files/analysis.json @@ -104,6 +104,40 @@ } ] }, + ":focus": { + "tokens": [ + { + "property": "borderColor", + "token": "tokens.colorStrokeFocus2", + "path": [":focus", "borderColor"] + }, + { + "property": "borderRadius", + "token": "tokens.borderRadiusMedium", + "path": [":focus", "borderRadius"] + }, + { + "property": "outline", + "token": "tokens.strokeWidthThick", + "path": [":focus", "outline"] + }, + { + "property": "outline", + "token": "tokens.colorTransparentStroke", + "path": [":focus", "outline"] + }, + { + "property": "boxShadow", + "token": "tokens.strokeWidthThin", + "path": [":focus", "boxShadow"] + }, + { + "property": "boxShadow", + "token": "tokens.colorStrokeFocus2", + "path": [":focus", "boxShadow"] + } + ] + }, "'@supports (-moz-appearance:button)'": { "tokens": [], "nested": { From bf1476dac409115e33cb7385822991c9084be7e6 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 9 Apr 2025 15:37:44 -0700 Subject: [PATCH 21/75] expanding tests to cover makeResetStyles --- .../token-analyzer/src/__tests__/e2e.test.ts | 120 +++++++++++++++--- 1 file changed, 101 insertions(+), 19 deletions(-) diff --git 
a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index d8397ddd6..1c802dc24 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -44,25 +44,107 @@ describe('e2e test', () => { expect(styles).toHaveProperty('useRootBaseClassName'); }); - describe('validate makeResetStyles tokens', () => { - // Define token cases for makeResetStyles tests - const resetStyleTokenCases = [ - ['backgroundColor', 'tokens.colorNeutralBackground1Hover'], - ['borderColor', 'tokens.colorNeutralStroke1Hover'], - ['color', 'tokens.colorNeutralForeground1Hover'], - ]; - test.each(resetStyleTokenCases)( + /** + * Factory function that outputs a function we can test against. This defines the token test params + * that we will reuse across our tests. Note that this function must be called within the test.each() function + * to ensure that the test context is properly set up. + * @param tokenArray an array of tokens to pass into our factory + * @returns a function we will call within test.each() + */ + const tokenTestFactory = (tokenArray: any) => { + return (propertyName: string, expectedToken: string) => { + const token = tokenArray.some( + (t: any) => t.property === propertyName && t.token === expectedToken + ); + expect(token).toBeTruthy(); + }; + }; + + /** + * Reusable function to check tokens vs a known set + * @param tokenArray the token array to test. Must be in the form of a function due to the lifecycle of Jest + * @param testArray the known set of tokens are we looking for + */ + const checkTokens = (tokenArray: () => any[], testArray: any[]) => { + test.each(testArray)( '%s token is properly configured', (propertyName, expectedToken) => { - const hoverMakeResetTokens = - styles.useRootBaseClassName.resetStyles.nested["':hover'"].tokens; - const token = hoverMakeResetTokens.find( - (t: any) => t.property === propertyName - ); - expect(token).toBeDefined(); - expect(token.token).toBe(expectedToken); + tokenTestFactory(tokenArray())(propertyName, expectedToken); } ); + + // Check if the length of the token array matches the expected length + test(`token array length should be ${testArray.length}`, () => { + expect(tokenArray().length).toBe(testArray.length); + }); + }; + + describe('validate makeResetStyles tokens', () => { + // Define token cases for hover makeResetStyles tests + checkTokens( + () => styles.useRootBaseClassName.resetStyles.nested["':hover'"].tokens, + [ + ['backgroundColor', 'tokens.colorNeutralBackground1Hover'], + ['borderColor', 'tokens.colorNeutralStroke1Hover'], + ['color', 'tokens.colorNeutralForeground1Hover'], + ] + ); + + // Define token cases for active hover makeResetStyles tests + checkTokens( + () => + styles.useRootBaseClassName.resetStyles.nested["':hover:active'"] + .tokens, + [ + ['backgroundColor', 'tokens.colorNeutralBackground1Pressed'], + ['borderColor', 'tokens.colorNeutralStroke1Pressed'], + ['color', 'tokens.colorNeutralForeground1Pressed'], + ] + ); + + // base makeResetStyles tests + checkTokens( + () => styles.useRootBaseClassName.resetStyles.tokens, + [ + ['backgroundColor', 'tokens.colorNeutralBackground1'], + ['color', 'tokens.colorNeutralForeground1'], + ['border', 'tokens.strokeWidthThin'], + ['border', 'tokens.colorNeutralStroke1'], + ['fontFamily', 'tokens.fontFamilyBase'], + ['padding', 'tokens.spacingHorizontalM'], + ['borderRadius', 'tokens.borderRadiusMedium'], + ['fontSize', 'tokens.fontSizeBase300'], + ['fontWeight', 
'tokens.fontWeightSemibold'], + ['lineHeight', 'tokens.lineHeightBase300'], + ['transitionDuration', 'tokens.durationFaster'], + ['transitionTimingFunction', 'tokens.curveEasyEase'], + ] + ); + + // Token cases for makeResetStyles focus + checkTokens( + () => styles.useRootBaseClassName.resetStyles.nested[':focus'].tokens, + [ + ['borderColor', 'tokens.colorStrokeFocus2'], + ['borderRadius', 'tokens.borderRadiusMedium'], + ['outline', 'tokens.strokeWidthThick'], + ['outline', 'tokens.strokeWidthThick'], + ['boxShadow', 'tokens.strokeWidthThin'], + ['boxShadow', 'tokens.colorStrokeFocus2'], + ] + ); + + // Token cases for makeResetStyles Mozilla bug + checkTokens( + () => + styles.useRootBaseClassName.resetStyles.nested[ + "'@supports (-moz-appearance:button)'" + ].nested[':focus'].tokens, + [ + ['boxShadow', 'tokens.colorStrokeFocus2'], + ['boxShadow', 'tokens.strokeWidthThin'], + ] + ); }); describe('validate makeStyles tokens', () => { @@ -73,10 +155,10 @@ describe('e2e test', () => { test.each(makeStylesStyles)( '%s token is properly configured', (propertyName, expectedToken) => { - const rootOutline = styles.useRootStyles.outline.tokens; - const token = rootOutline.find((t: any) => t.property === propertyName); - expect(token).toBeDefined(); - expect(token.token).toBe(expectedToken); + tokenTestFactory(styles.useRootStyles.outline.tokens)( + propertyName, + expectedToken + ); } ); }); }); From b5391f12276956dc1d3eb4857ec06290a45d0d87 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 10 Apr 2025 02:06:03 -0700 Subject: [PATCH 22/75] update logic in extractTokensFromText to manage variable declarations --- packages/token-analyzer/README.md | 2 + .../src/__tests__/test-files/analysis.json | 15 ++++ packages/token-analyzer/src/tokenUtils.ts | 69 +++++++++++++++---- 3 files changed, 74 insertions(+), 12 deletions(-) diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index e536f8121..f1068259b 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -11,8 +11,10 @@ A static analysis tool that scans your project's style files to track and analyz - add config to point to custom prettier config for file output. - run shorthands functions with placeholder functions and parse out which properties are actually being set. This is a special case we will need to hard code and thus should make independent from the rest of our logic so it's easier to modify/remove. - add tests for findTsConfigPath +- Update extractTokensFromText to find imported vars and tokens. We've updated it to resolve variable declarations thus far, but there are potential cases where imports could impact this as well. - add tests for structure and output.
~~We're processing the styles but not putting them in the right places right now~~ - update contributing doc with info about version management +- Dedupe logic from extractTokensFromText and isTokenReference ## Features diff --git a/packages/token-analyzer/src/__tests__/test-files/analysis.json b/packages/token-analyzer/src/__tests__/test-files/analysis.json index 40eb93cad..51f8d7f37 100644 --- a/packages/token-analyzer/src/__tests__/test-files/analysis.json +++ b/packages/token-analyzer/src/__tests__/test-files/analysis.json @@ -143,6 +143,11 @@ "nested": { ":focus": { "tokens": [ + { + "property": "boxShadow", + "token": "tokens.strokeWidthThin", + "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] + }, { "property": "boxShadow", "token": "tokens.colorStrokeFocus2", @@ -825,6 +830,11 @@ "token": "tokens.shadow2", "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] }, + { + "property": "boxShadow", + "token": "tokens.strokeWidthThin", + "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] + }, { "property": "boxShadow", "token": "tokens.colorStrokeFocus2", @@ -849,6 +859,11 @@ "token": "tokens.shadow2", "path": ["'@supports (-moz-appearance:button)'", ":focus", "':hover'", "boxShadow"] }, + { + "property": "boxShadow", + "token": "tokens.strokeWidthThin", + "path": ["'@supports (-moz-appearance:button)'", ":focus", "':hover'", "boxShadow"] + }, { "property": "boxShadow", "token": "tokens.colorStrokeFocus2", diff --git a/packages/token-analyzer/src/tokenUtils.ts b/packages/token-analyzer/src/tokenUtils.ts index 12cf9b02e..24bfabbf0 100644 --- a/packages/token-analyzer/src/tokenUtils.ts +++ b/packages/token-analyzer/src/tokenUtils.ts @@ -1,5 +1,5 @@ // tokenUtils.ts -import { Node, Symbol } from 'ts-morph'; +import { Node, Symbol, SyntaxKind } from 'ts-morph'; import { TOKEN_REGEX } from './types.js'; /** @@ -35,18 +35,43 @@ export function isTokenReference(textOrNode: string | Node | Symbol): boolean { /** * Extracts all token references from a text string or Node + * TODO: Dedupe logic from extractTokensFromText and isTokenReference * @param textOrNode The text or Node to extract tokens from * @returns Array of token reference strings */ -export function extractTokensFromText(textOrNode: string | Node | Symbol): string[] { +export function extractTokensFromText( + textOrNode: string | Node | Symbol +): string[] { // If we have a Node or Symbol, extract the text to check - let text: string; + let text: string | undefined; + const matches: string[] = []; if (typeof textOrNode === 'string') { text = textOrNode; + } else if (Node.isNode(textOrNode) && Node.isTemplateExpression(textOrNode)) { + textOrNode.getTemplateSpans().forEach((span) => { + if (isTokenReference(span.getExpression().getText())) { + const token = span.getExpression().getText(); + matches.push(token); + } else { + const spanExpression = span.getExpression(); + if (spanExpression.getKind() === SyntaxKind.Identifier) { + const spanSymbol = spanExpression.getSymbol(); + const spanDeclarations = spanSymbol?.getDeclarations(); + if (spanSymbol && spanDeclarations && spanDeclarations.length > 0) { + if (Node.isVariableDeclaration(spanDeclarations[0])) { + const spanInitializer = spanDeclarations[0].getInitializer(); + if (spanInitializer) { + matches.push(...extractTokensFromText(spanInitializer)); + } + } + } + } + } + }); } else if (Node.isNode(textOrNode)) { text = textOrNode.getText(); - } else if (textOrNode instanceof Symbol) { + } else { // For symbols, we need to 
check the declarations const declarations = textOrNode.getDeclarations(); if (!declarations || declarations.length === 0) { @@ -55,12 +80,15 @@ export function extractTokensFromText(textOrNode: string | Node | Symbol): strin // Get text from the first declaration text = declarations[0].getText(); - } else { - return []; } - const matches = text.match(TOKEN_REGEX); - return matches || []; + if (text !== undefined) { + const regMatch = text.match(TOKEN_REGEX); + if (regMatch) { + matches.push(...regMatch); + } + } + return matches; } /** @@ -71,9 +99,19 @@ export function extractTokensFromText(textOrNode: string | Node | Symbol): strin export function getPropertiesForShorthand(functionName: string): string[] { const shorthandMap: Record = { // Border shorthands - borderColor: ['borderTopColor', 'borderRightColor', 'borderBottomColor', 'borderLeftColor'], + borderColor: [ + 'borderTopColor', + 'borderRightColor', + 'borderBottomColor', + 'borderLeftColor', + ], border: ['borderWidth', 'borderStyle', 'borderColor'], - borderRadius: ['borderTopLeftRadius', 'borderTopRightRadius', 'borderBottomRightRadius', 'borderBottomLeftRadius'], + borderRadius: [ + 'borderTopLeftRadius', + 'borderTopRightRadius', + 'borderBottomRightRadius', + 'borderBottomLeftRadius', + ], // Padding/margin shorthands padding: ['paddingTop', 'paddingRight', 'paddingBottom', 'paddingLeft'], @@ -83,12 +121,19 @@ export function getPropertiesForShorthand(functionName: string): string[] { flex: ['flexGrow', 'flexShrink', 'flexBasis'], gap: ['rowGap', 'columnGap'], overflow: ['overflowX', 'overflowY'], - gridArea: ['gridRowStart', 'gridColumnStart', 'gridRowEnd', 'gridColumnEnd'], + gridArea: [ + 'gridRowStart', + 'gridColumnStart', + 'gridRowEnd', + 'gridColumnEnd', + ], inset: ['top', 'right', 'bottom', 'left'], }; // Extract base function name if it's a qualified name (e.g., shorthands.borderColor -> borderColor) - const baseName = functionName.includes('.') ? functionName.split('.').pop() : functionName; + const baseName = functionName.includes('.') + ? functionName.split('.').pop() + : functionName; return baseName && shorthandMap[baseName!] ? shorthandMap[baseName!] : []; } From 8645435418444570ff17ccc4e5a21bfc925c4481 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 10 Apr 2025 02:13:48 -0700 Subject: [PATCH 23/75] update todos --- packages/token-analyzer/README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index f1068259b..a8940bcc0 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -7,7 +7,6 @@ A static analysis tool that scans your project's style files to track and analyz - we also need to ensure var analysis is done correctly after the refactor - Convert token member within the analysis output to an array so we can hold multiple tokens. The order should be the order or priority. [0] being the highest pri with the last item in the array the least prioritized. -- Add makeResetStyles specific tests in analyzer to ensure we process those correctly. - add config to point to custom prettier config for file output. - run shorthands functions with placeholder functions and parse out which properties are actually being set. This is a special case we will need to hard code and thus should make independent from the rest of our logic so it's easier to modify/remove. 
- add tests for findTsConfigPath From ee124cbb892b6bbb06f04e1de59a56deb28d8585 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 10 Apr 2025 14:37:59 -0700 Subject: [PATCH 24/75] remove processNodeForAffectedProperties Process output of shorthands functions and map tokens from output add more complex shorthand case update tests --- .../token-analyzer/src/__tests__/e2e.test.ts | 19 +--- .../src/__tests__/test-files/analysis.json | 6 +- .../test-files/useButtonStyles.styles.ts | 9 +- packages/token-analyzer/src/astAnalyzer.ts | 100 +++-------------- packages/token-analyzer/src/tokenUtils.ts | 106 ++++++++++-------- 5 files changed, 95 insertions(+), 145 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index 1c802dc24..58138018d 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -8,6 +8,8 @@ describe('e2e test', () => { let analysis: any; let styles: any; + // generate our analysis file before all our tests run. Additionally, we set a long timeout + // to ensure that we have enough time to run the analysis. beforeAll(async () => { // Create temp directory for test files tempDir = path.join(process.cwd(), 'src', '__tests__', 'test-files'); @@ -29,7 +31,7 @@ describe('e2e test', () => { }); styles = analysis['useButtonStyles.styles.ts'].styles; - }); + }, 100000); afterAll(async () => { // Clean up temp files @@ -148,18 +150,9 @@ describe('e2e test', () => { }); describe('validate makeStyles tokens', () => { - // Define token cases for makeResetStyles tests - const makeStylesStyles = [ - ['backgroundColor', 'tokens.colorTransparentBackground'], - ]; - test.each(makeStylesStyles)( - '%s token is properly configured', - (propertyName, expectedToken) => { - tokenTestFactory(styles.useRootStyles.outline.tokens)( - propertyName, - expectedToken - ); - } + checkTokens( + () => styles.useRootStyles.outline.tokens, + [['backgroundColor', 'tokens.colorTransparentBackground']] ); }); }); diff --git a/packages/token-analyzer/src/__tests__/test-files/analysis.json b/packages/token-analyzer/src/__tests__/test-files/analysis.json index 51f8d7f37..66d0800f7 100644 --- a/packages/token-analyzer/src/__tests__/test-files/analysis.json +++ b/packages/token-analyzer/src/__tests__/test-files/analysis.json @@ -739,17 +739,17 @@ }, { "property": "borderRightColor", - "token": "tokens.colorStrokeFocus2", + "token": "tokens.colorStrokeFocus1", "path": [":focus", "borderRightColor"] }, { "property": "borderBottomColor", - "token": "tokens.colorStrokeFocus2", + "token": "tokens.borderRadiusCircular", "path": [":focus", "borderBottomColor"] }, { "property": "borderLeftColor", - "token": "tokens.colorStrokeFocus2", + "token": "tokens.colorStrokeFocus1", "path": [":focus", "borderLeftColor"] }, { diff --git a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts index b18d6ee42..9c4c1a11e 100644 --- a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts +++ b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts @@ -20,6 +20,8 @@ export const buttonClassNames: SlotClassNames = { const iconSpacingVar = '--fui-Button__icon--spacing'; +const tokenInInitializer = tokens.borderRadiusCircular; + const buttonSpacingSmall = '3px'; const buttonSpacingSmallWithIcon = '1px'; const buttonSpacingMedium = '5px'; @@ -470,7 +472,12 @@ const 
useRootFocusStyles = makeStyles({ // Primary styles primary: { ...createCustomFocusIndicatorStyle({ - ...shorthands.borderColor(tokens.colorStrokeFocus2), + // added another color here to test the shorthands output. + ...shorthands.borderColor( + tokens.colorStrokeFocus2, + tokens.colorStrokeFocus1, + tokenInInitializer + ), boxShadow: `${tokens.shadow2}, 0 0 0 ${tokens.strokeWidthThin} ${tokens.colorStrokeFocus2} inset, 0 0 0 ${tokens.strokeWidthThick} ${tokens.colorNeutralForegroundOnBrand} inset`, ':hover': { boxShadow: `${tokens.shadow2}, 0 0 0 ${tokens.strokeWidthThin} ${tokens.colorStrokeFocus2} inset`, diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index bf4186a4f..d446d9594 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -188,13 +188,26 @@ function processStyleProperty( } else if (Node.isCallExpression(node)) { // Process calls like shorthands.borderColor(tokens.color) const functionName = node.getExpression().getText(); - // we should pass the number of arguments so we can properly map which overload is being called. - const affectedProperties = getPropertiesForShorthand(functionName); + // check if we're using a shorthand function and get the output of a call based on parameters passed into the function + const affectedProperties = getPropertiesForShorthand( + functionName, + node.getArguments() + ); + + // If we have a shorthand function, we need to process the affected properties. + // getPropertiesForShorthand will return an array of objects + // with the property name and the token reference + // e.g. { property: 'borderColor', token: 'tokens.color' } + // It will also deeply check for initialized values etc and validate they are tokens if (affectedProperties.length > 0) { // Process each argument and apply it to all affected properties - node.getArguments().forEach((argument) => { - processNodeForAffectedProperties(argument, affectedProperties, path); + affectedProperties.forEach((argument) => { + tokens.push({ + property: argument.property, + token: argument.token, + path: path.concat(argument.property), + }); }); } else { // Generic handling of functions that are not whitelisted @@ -229,85 +242,6 @@ function processStyleProperty( } } - // Helper function to process nodes for multiple affected properties - function processNodeForAffectedProperties( - node: Node, - properties: string[], - basePath: string[] - ): void { - if (!node) { - return; - } - - // If this is a direct token reference - if (Node.isPropertyAccessExpression(node) && isTokenReference(node)) { - properties.forEach((property) => { - tokens.push({ - property, - token: node.getText(), - path: basePath.concat(property), - }); - }); - return; - } - - // If this is an identifier that might be a variable - if ( - Node.isIdentifier(node) && - importedValues && - importedValues.has(node.getText()) - ) { - properties.forEach((property) => { - const importTokens = processImportedStringTokens( - importedValues, - property, - node.getText(), - basePath, - TOKEN_REGEX - ); - tokens.push(...importTokens); - }); - return; - } - - // For other node types, process them normally but with each property - if (Node.isStringLiteral(node) || Node.isTemplateExpression(node)) { - const text = node.getText().replace(/['"]/g, ''); - - // Check for tokens in the text - const matches = extractTokensFromText(node); - if (matches.length > 0) { - properties.forEach((property) => { - matches.forEach((match) => { - tokens.push({ 
- property, - token: match, - path: basePath, - }); - }); - }); - } - - // Check for CSS vars - if (text.includes('var(')) { - properties.forEach((property) => { - const cssVarTokens = extractTokensFromCssVars( - text, - property, - basePath, - TOKEN_REGEX - ); - tokens.push(...cssVarTokens); - }); - } - } - - // For any other complex expressions, process them normally - else { - processNode(node, basePath); - } - } - if (Node.isPropertyAssignment(prop)) { const initializer = prop.getInitializer(); if (initializer) { diff --git a/packages/token-analyzer/src/tokenUtils.ts b/packages/token-analyzer/src/tokenUtils.ts index 24bfabbf0..c745df830 100644 --- a/packages/token-analyzer/src/tokenUtils.ts +++ b/packages/token-analyzer/src/tokenUtils.ts @@ -1,6 +1,7 @@ // tokenUtils.ts import { Node, Symbol, SyntaxKind } from 'ts-morph'; import { TOKEN_REGEX } from './types.js'; +import { shorthands } from '@griffel/react'; /** * Centralizes token detection logic to make future changes easier @@ -33,6 +34,19 @@ export function isTokenReference(textOrNode: string | Node | Symbol): boolean { return test; } +export function getExpresionFromIdentifier(node: Node): Node | undefined { + const nodeSymbol = node.getSymbol(); + const nodeDeclarations = nodeSymbol?.getDeclarations(); + if (nodeSymbol && nodeDeclarations && nodeDeclarations.length > 0) { + if (Node.isVariableDeclaration(nodeDeclarations[0])) { + const nodeInitializer = nodeDeclarations[0].getInitializer(); + if (nodeInitializer) { + return nodeInitializer; + } + } + } +} + /** * Extracts all token references from a text string or Node * TODO: Dedupe logic from extractTokensFromText and isTokenReference @@ -56,21 +70,22 @@ export function extractTokensFromText( } else { const spanExpression = span.getExpression(); if (spanExpression.getKind() === SyntaxKind.Identifier) { - const spanSymbol = spanExpression.getSymbol(); - const spanDeclarations = spanSymbol?.getDeclarations(); - if (spanSymbol && spanDeclarations && spanDeclarations.length > 0) { - if (Node.isVariableDeclaration(spanDeclarations[0])) { - const spanInitializer = spanDeclarations[0].getInitializer(); - if (spanInitializer) { - matches.push(...extractTokensFromText(spanInitializer)); - } - } + const spanInitializer = getExpresionFromIdentifier(spanExpression); + if (spanInitializer) { + matches.push(...extractTokensFromText(spanInitializer)); } } } }); } else if (Node.isNode(textOrNode)) { - text = textOrNode.getText(); + if (Node.isIdentifier(textOrNode)) { + const initializer = getExpresionFromIdentifier(textOrNode); + if (initializer) { + matches.push(...extractTokensFromText(initializer)); + } + } else { + text = textOrNode.getText(); + } } else { // For symbols, we need to check the declarations const declarations = textOrNode.getDeclarations(); @@ -91,49 +106,50 @@ export function extractTokensFromText( return matches; } +type FunctionParams = T extends (...args: infer P) => any ? 
P : never; /** * Maps shorthand function names to the CSS properties they affect * @param functionName The name of the shorthand function (e.g., "borderColor" or "shorthands.borderColor") * @returns Array of CSS property names affected by this shorthand */ -export function getPropertiesForShorthand(functionName: string): string[] { - const shorthandMap: Record = { - // Border shorthands - borderColor: [ - 'borderTopColor', - 'borderRightColor', - 'borderBottomColor', - 'borderLeftColor', - ], - border: ['borderWidth', 'borderStyle', 'borderColor'], - borderRadius: [ - 'borderTopLeftRadius', - 'borderTopRightRadius', - 'borderBottomRightRadius', - 'borderBottomLeftRadius', - ], - - // Padding/margin shorthands - padding: ['paddingTop', 'paddingRight', 'paddingBottom', 'paddingLeft'], - margin: ['marginTop', 'marginRight', 'marginBottom', 'marginLeft'], - - // Other common shorthands - flex: ['flexGrow', 'flexShrink', 'flexBasis'], - gap: ['rowGap', 'columnGap'], - overflow: ['overflowX', 'overflowY'], - gridArea: [ - 'gridRowStart', - 'gridColumnStart', - 'gridRowEnd', - 'gridColumnEnd', - ], - inset: ['top', 'right', 'bottom', 'left'], - }; - +export function getPropertiesForShorthand( + functionName: string, + args: Node[] +): { property: string; token: string }[] { // Extract base function name if it's a qualified name (e.g., shorthands.borderColor -> borderColor) const baseName = functionName.includes('.') ? functionName.split('.').pop() : functionName; - return baseName && shorthandMap[baseName!] ? shorthandMap[baseName!] : []; + const cleanFunctionName = baseName as keyof typeof shorthands; + const shorthandFunction = shorthands[cleanFunctionName]; + if (shorthandFunction) { + const argValues = args.map( + (arg) => extractTokensFromText(arg)[0] + ) as FunctionParams; + + console.log(args.map((arg) => extractTokensFromText(arg)[0])); + // @ts-expect-error We have a very complex union type that is difficult/impossible to resolve statically. + const shortHandOutput = shorthandFunction(...argValues); + console.log(shortHandOutput); + + // Once we have the shorthand output, we should process the values, sanitize them and then return only the properties + // that contain tokens. + const shortHandTokens: { property: string; token: string }[] = []; + + Object.keys(shortHandOutput).forEach((key) => { + const value = shortHandOutput[key]; + if (isTokenReference(value)) { + shortHandTokens.push({ + property: key, + token: value, + }); + } + }); + + return shortHandTokens; + } + + // The function didn't match any known shorthand functions so return an empty array. 
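+    // For example, a helper call such as createCustomFocusIndicatorStyle(...) in useButtonStyles.styles.ts is not a
+    // Griffel shorthand, so it falls through here and the caller's generic call-expression handling takes over.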
+ return []; } From 5d87edac7a2da7785d53928eded63a6038abf197 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 10 Apr 2025 15:55:55 -0700 Subject: [PATCH 25/75] fix typings and update comments --- packages/token-analyzer/src/tokenUtils.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/token-analyzer/src/tokenUtils.ts b/packages/token-analyzer/src/tokenUtils.ts index c745df830..bbe824fea 100644 --- a/packages/token-analyzer/src/tokenUtils.ts +++ b/packages/token-analyzer/src/tokenUtils.ts @@ -49,7 +49,6 @@ export function getExpresionFromIdentifier(node: Node): Node | undefined { /** * Extracts all token references from a text string or Node - * TODO: Dedupe logic from extractTokensFromText and isTokenReference * @param textOrNode The text or Node to extract tokens from * @returns Array of token reference strings */ @@ -78,6 +77,7 @@ export function extractTokensFromText( } }); } else if (Node.isNode(textOrNode)) { + // If we have an identifier, we need to check if it has an initializer. From there we should reprocess to extract tokens if (Node.isIdentifier(textOrNode)) { const initializer = getExpresionFromIdentifier(textOrNode); if (initializer) { @@ -125,20 +125,19 @@ export function getPropertiesForShorthand( const shorthandFunction = shorthands[cleanFunctionName]; if (shorthandFunction) { const argValues = args.map( + // We have to extract the token from the argument in the case that there's a template literal, initializer, etc. (arg) => extractTokensFromText(arg)[0] ) as FunctionParams; - console.log(args.map((arg) => extractTokensFromText(arg)[0])); // @ts-expect-error We have a very complex union type that is difficult/impossible to resolve statically. const shortHandOutput = shorthandFunction(...argValues); - console.log(shortHandOutput); // Once we have the shorthand output, we should process the values, sanitize them and then return only the properties // that contain tokens. 
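// As a rough illustration (expansion inferred from the analysis output, not from @griffel/react internals),
// shorthands.borderColor(tokens.colorStrokeFocus2) yields an object along the lines of
// { borderTopColor: tokens.colorStrokeFocus2, borderRightColor: tokens.colorStrokeFocus2, borderBottomColor: tokens.colorStrokeFocus2, borderLeftColor: tokens.colorStrokeFocus2 },
// and only the entries whose values are token references are kept below.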
const shortHandTokens: { property: string; token: string }[] = []; Object.keys(shortHandOutput).forEach((key) => { - const value = shortHandOutput[key]; + const value = shortHandOutput[key as keyof typeof shortHandOutput]; if (isTokenReference(value)) { shortHandTokens.push({ property: key, From 29f34540b1e0ed082c032ac7e7afcbfbbc99d629 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 10 Apr 2025 16:12:23 -0700 Subject: [PATCH 26/75] updates to package.json --- packages/token-analyzer/package.json | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/token-analyzer/package.json b/packages/token-analyzer/package.json index b4f2ee84e..7e8de46c5 100644 --- a/packages/token-analyzer/package.json +++ b/packages/token-analyzer/package.json @@ -6,12 +6,14 @@ "dependencies": { "ts-morph": "^24.0.0", "typescript": "5.7.3", - "prettier": "^2.6.2" + "prettier": "^2.6.2", + "@griffel/react": "^1.5.22" }, "scripts": { "analyze-tokens": "NODE_OPTIONS=\"--loader ts-node/esm\" ts-node-esm src/index.ts", "test": "jest", "test:debug": "node --loader ts-node/esm --inspect-brk node_modules/.bin/jest --runInBand" }, - "private": true + "private": true, + "type": "module" } From f0e8d1d31b6dadf44225b552e3b3dd848f94f963 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 10 Apr 2025 18:30:13 -0700 Subject: [PATCH 27/75] small fixes --- packages/token-analyzer/README.md | 1 - packages/token-analyzer/src/index.ts | 2 +- packages/token-analyzer/tsconfig.json | 5 ++++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index a8940bcc0..1a445da55 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -8,7 +8,6 @@ A static analysis tool that scans your project's style files to track and analyz - Convert token member within the analysis output to an array so we can hold multiple tokens. The order should be the order or priority. [0] being the highest pri with the last item in the array the least prioritized. - add config to point to custom prettier config for file output. -- run shorthands functions with placeholder functions and parse out which properties are actually being set. This is a special case we will need to hard code and thus should make independent from the rest of our logic so it's easier to modify/remove. - add tests for findTsConfigPath - Update extractTokensFromText to find imported vars and tokens. We've updated it to resolve variable declarations thusfar but there's potential cases where we could have imports impact this as well. - add tests for structure and output. 
~~We're processing the styles but not putting them in the right places right now~~ diff --git a/packages/token-analyzer/src/index.ts b/packages/token-analyzer/src/index.ts index 339d330ed..4ea3a3756 100644 --- a/packages/token-analyzer/src/index.ts +++ b/packages/token-analyzer/src/index.ts @@ -6,7 +6,7 @@ import { findStyleFiles } from './fileOperations.js'; import { analyzeFile } from './astAnalyzer.js'; import { AnalysisResults, FileAnalysis } from './types.js'; import { configure, log, error, measureAsync } from './debugUtils.js'; -import { findTsConfigPath } from './findTsConfigPath'; +import { findTsConfigPath } from './findTsConfigPath.js'; async function analyzeProjectStyles( rootDir: string, diff --git a/packages/token-analyzer/tsconfig.json b/packages/token-analyzer/tsconfig.json index a5d448b2f..2c11b804e 100644 --- a/packages/token-analyzer/tsconfig.json +++ b/packages/token-analyzer/tsconfig.json @@ -2,7 +2,10 @@ "extends": "../../tsconfig.base.json", "files": [], "compilerOptions": { - "jsx": "react" + "jsx": "react", + "esModuleInterop": true, + "module": "ESNext", // or "ES2020", "ES2022" + "moduleResolution": "node" }, "include": [], "references": [ From b12cb769ba810489e7443515fe81e50b3881b81e Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 10 Apr 2025 18:33:01 -0700 Subject: [PATCH 28/75] remove usage of process.cwd --- packages/token-analyzer/package.json | 3 +-- packages/token-analyzer/src/__tests__/analyzer.test.ts | 4 ++-- packages/token-analyzer/src/__tests__/e2e.test.ts | 10 ++-------- packages/token-analyzer/src/findTsConfigPath.ts | 2 +- 4 files changed, 6 insertions(+), 13 deletions(-) diff --git a/packages/token-analyzer/package.json b/packages/token-analyzer/package.json index 7e8de46c5..eac770586 100644 --- a/packages/token-analyzer/package.json +++ b/packages/token-analyzer/package.json @@ -14,6 +14,5 @@ "test": "jest", "test:debug": "node --loader ts-node/esm --inspect-brk node_modules/.bin/jest --runInBand" }, - "private": true, - "type": "module" + "private": true } diff --git a/packages/token-analyzer/src/__tests__/analyzer.test.ts b/packages/token-analyzer/src/__tests__/analyzer.test.ts index 3b1d6227d..718523862 100644 --- a/packages/token-analyzer/src/__tests__/analyzer.test.ts +++ b/packages/token-analyzer/src/__tests__/analyzer.test.ts @@ -11,7 +11,7 @@ describe('Token Analyzer', () => { beforeAll(async () => { // Create temp directory for test files - const tempDir = path.join(process.cwd(), 'temp-test-files'); + const tempDir = path.join(__dirname, 'temp-test-files'); await fs.mkdir(tempDir, { recursive: true }); tempFilePath = path.join(tempDir, 'test-styles.ts'); await fs.writeFile(tempFilePath, sampleStyles); @@ -25,7 +25,7 @@ describe('Token Analyzer', () => { afterAll(async () => { // Cleanup temp files - const tempDir = path.join(process.cwd(), 'temp-test-files'); + const tempDir = path.join(__dirname, 'temp-test-files'); await fs.rm(tempDir, { recursive: true, force: true }); }); diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index 58138018d..b3712b2b8 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -12,15 +12,9 @@ describe('e2e test', () => { // to ensure that we have enough time to run the analysis. 
beforeAll(async () => { // Create temp directory for test files - tempDir = path.join(process.cwd(), 'src', '__tests__', 'test-files'); + tempDir = path.join(__dirname, 'test-files'); await fs.mkdir(tempDir, { recursive: true }); - targetPath = path.join( - process.cwd(), - 'src', - '__tests__', - 'test-files', - 'analysis.json' - ); + targetPath = path.join(__dirname, 'test-files', 'analysis.json'); await analyzeProjectStyles(tempDir, targetPath); await fs diff --git a/packages/token-analyzer/src/findTsConfigPath.ts b/packages/token-analyzer/src/findTsConfigPath.ts index f914a1de2..53c652ade 100644 --- a/packages/token-analyzer/src/findTsConfigPath.ts +++ b/packages/token-analyzer/src/findTsConfigPath.ts @@ -1,7 +1,7 @@ import * as path from 'path'; import * as fs from 'fs'; -export function findTsConfigPath(startDir = process.cwd()): string | null { +export function findTsConfigPath(startDir = __dirname): string | null { let currentDir = startDir; const root = path.parse(currentDir).root; From b4f022866420fcc7d0da7bd661fd26f2c104f986 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 11 Apr 2025 15:42:46 -0700 Subject: [PATCH 29/75] add small test --- packages/token-analyzer/src/__tests__/e2e.test.ts | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index b3712b2b8..06a53775d 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -2,6 +2,8 @@ import * as path from 'path'; import * as fs from 'fs/promises'; import { analyzeProjectStyles } from '../index.js'; +const styleFileName = 'useButtonStyles.styles.ts'; + describe('e2e test', () => { let tempDir: string; let targetPath: string; @@ -24,7 +26,7 @@ describe('e2e test', () => { analysis = JSON.parse(analysisData); }); - styles = analysis['useButtonStyles.styles.ts'].styles; + styles = analysis[styleFileName].styles; }, 100000); afterAll(async () => { @@ -34,10 +36,12 @@ describe('e2e test', () => { test('validate basic structure', () => { // Validate the structure of the analysis object - expect(analysis).toHaveProperty(['useButtonStyles.styles.ts']); + expect(analysis).toHaveProperty([styleFileName]); // Validate that we process a makeResetStyles function useRootBaseClassName expect(styles).toHaveProperty('useRootBaseClassName'); + + expect(analysis[styleFileName]).toHaveProperty('metadata'); }); /** From 3549e8951ff1d4ec479312142f287edf6ea8b871 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 17 Apr 2025 09:12:35 -0700 Subject: [PATCH 30/75] partial change --- packages/token-analyzer/README.md | 7 ++++++ packages/token-analyzer/src/tokenUtils.ts | 28 ++++++++++++++++++++++- packages/token-analyzer/src/types.ts | 2 +- 3 files changed, 35 insertions(+), 2 deletions(-) diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index 1a445da55..c955d68da 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -13,6 +13,13 @@ A static analysis tool that scans your project's style files to track and analyz - add tests for structure and output. ~~We're processing the styles but not putting them in the right places right now~~ - update contributing doc with info about version management - Dedupe logic from extractTokensFromText and isTokenReference +- Add token test that determines which package tokens come from. 
i.e. (@fluentui/tokens or @fluentui/semantic-tokens) +- Data Flow + - find all style files + - get all imports, analyze them for token references or values, return them to the main script flow + - process merge styles and map metadata to styles + - parse through each property of styles + - with each property, we should look at whether an initializer is there, spread, declaration, etc. and then determine if that's a token. If it is, we also need to see if there's a fallback chain and not just log a token but also log the tokens in the right order (this should also open the door to ensure we don't overcomplicate or duplicate logic here) ## Features diff --git a/packages/token-analyzer/src/tokenUtils.ts b/packages/token-analyzer/src/tokenUtils.ts index bbe824fea..e129e4e6b 100644 --- a/packages/token-analyzer/src/tokenUtils.ts +++ b/packages/token-analyzer/src/tokenUtils.ts @@ -1,6 +1,6 @@ // tokenUtils.ts import { Node, Symbol, SyntaxKind } from 'ts-morph'; -import { TOKEN_REGEX } from './types.js'; +import { TOKEN_REGEX, TokenReference } from './types.js'; import { shorthands } from '@griffel/react'; /** @@ -152,3 +152,29 @@ export function getPropertiesForShorthand( // The function didn't match any known shorthand functions so return an empty array. return []; } + +/** + * Centralized pure function to add tokens to an array of tokens. This is useful in the event we change the contract + * or if we have to do additional logic or processing. Without it, we'd need to update 10+ locations. + * @param tokensToAdd + * @param target + * @returns + */ +export const addTokenToArray = ( + tokensToAdd: TokenReference[] | TokenReference, + target: TokenReference[] +) => { + // create new array without modifying the original array + const newArray = target.slice(); + + // add items to the array + // We should probably search the tokens array for matches or duplicates and then determine if we need to add them or update existing entries. + // TODO we also need to update the token member within the TokenReference object to be an array.
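+  // Hypothetical call site (illustration only; the variable name is not from this change):
+  //   slotTokens = addTokenToArray({ property: 'color', token: ['tokens.colorNeutralForeground1'], path: ['color'] }, slotTokens);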
+ if (Array.isArray(tokensToAdd)) { + target.push(...tokensToAdd); + } else { + target.push(tokensToAdd); + } + return target; + // return arrayy without modifying the original array +}; diff --git a/packages/token-analyzer/src/types.ts b/packages/token-analyzer/src/types.ts index 06af5b407..f17ee7fd6 100644 --- a/packages/token-analyzer/src/types.ts +++ b/packages/token-analyzer/src/types.ts @@ -1,7 +1,7 @@ // types.ts export interface TokenReference { property: string; - token: string; + token: string[]; path: string[]; isVariableReference?: boolean; sourceFile?: string; From 59ac866ff081af90167303353f331e38951fd5df Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 17 Apr 2025 16:48:20 -0700 Subject: [PATCH 31/75] refactor token add logic update token member to array for priority --- .../src/__tests__/analyzer.test.ts | 8 +- .../src/__tests__/cssVarE2E.test.ts | 26 +- .../token-analyzer/src/__tests__/e2e.test.ts | 3 +- .../src/__tests__/test-files/analysis.json | 248 +++++++++--------- packages/token-analyzer/src/astAnalyzer.ts | 57 ++-- .../src/cssVarTokenExtractor.ts | 58 ++-- packages/token-analyzer/src/importAnalyzer.ts | 240 ++++++++++++----- packages/token-analyzer/src/tokenUtils.ts | 11 +- 8 files changed, 387 insertions(+), 264 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/analyzer.test.ts b/packages/token-analyzer/src/__tests__/analyzer.test.ts index 718523862..69d1cb76a 100644 --- a/packages/token-analyzer/src/__tests__/analyzer.test.ts +++ b/packages/token-analyzer/src/__tests__/analyzer.test.ts @@ -42,13 +42,13 @@ describe('Token Analyzer', () => { expect(styles.useStyles.root.tokens).toContainEqual( expect.objectContaining({ property: 'color', - token: 'tokens.colorNeutralForeground1', + token: ['tokens.colorNeutralForeground1'], }) ); expect(styles.useStyles.root.tokens).toContainEqual( expect.objectContaining({ property: 'borderRightColor', - token: 'tokens.colorNeutralStrokeDisabled', + token: ['tokens.colorNeutralStrokeDisabled'], }) ); @@ -56,7 +56,7 @@ describe('Token Analyzer', () => { expect(styles.useStyles.anotherSlot.tokens).toContainEqual( expect.objectContaining({ property: 'color', - token: 'tokens.colorNeutralForeground2', + token: ['tokens.colorNeutralForeground2'], }) ); @@ -67,7 +67,7 @@ describe('Token Analyzer', () => { expect(focusStyle?.tokens[0]).toEqual({ path: [':focus', 'textDecorationColor'], property: 'textDecorationColor', - token: 'tokens.colorStrokeFocus2', + token: ['tokens.colorStrokeFocus2'], }); // Verify metadata for conditional styles diff --git a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts index 1e4e7018d..755fa9396 100644 --- a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts +++ b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts @@ -104,7 +104,7 @@ describe('CSS Variable Token Extraction E2E', () => { expect(useStyles.direct.tokens).toContainEqual( expect.objectContaining({ property: 'color', - token: 'tokens.colorNeutralForeground1', + token: ['tokens.colorNeutralForeground1'], }) ); @@ -113,7 +113,7 @@ describe('CSS Variable Token Extraction E2E', () => { expect(useStyles.cssVar.tokens).toContainEqual( expect.objectContaining({ property: 'color', - token: 'tokens.colorBrandForeground4', + token: ['tokens.colorBrandForeground4'], }) ); @@ -122,7 +122,7 @@ describe('CSS Variable Token Extraction E2E', () => { expect(useStyles.importedToken.tokens).toContainEqual( expect.objectContaining({ property: 'color', - token: 
'tokens.colorBrandForeground6', + token: ['tokens.colorBrandForeground6'], isVariableReference: true, }) ); @@ -132,7 +132,7 @@ describe('CSS Variable Token Extraction E2E', () => { expect(useStyles.importedCssVar.tokens).toContainEqual( expect.objectContaining({ property: 'color', - token: 'tokens.colorBrandForeground3', + token: ['tokens.colorBrandForeground3'], isVariableReference: true, }) ); @@ -142,7 +142,7 @@ describe('CSS Variable Token Extraction E2E', () => { expect(useStyles.nestedCssVar.tokens).toContainEqual( expect.objectContaining({ property: 'background', - token: 'tokens.colorBrandForeground2', + token: ['tokens.colorBrandForeground2'], }) ); @@ -151,7 +151,7 @@ describe('CSS Variable Token Extraction E2E', () => { expect(useStyles.importedNestedVar.tokens).toContainEqual( expect.objectContaining({ property: 'color', - token: 'tokens.colorNeutralForeground3', + token: ['tokens.colorNeutralForeground3'], isVariableReference: true, }) ); @@ -161,11 +161,11 @@ describe('CSS Variable Token Extraction E2E', () => { expect(useStyles.importedComplexVar.tokens).toEqual( expect.arrayContaining([ expect.objectContaining({ - token: 'tokens.colorBrandBackground', + token: ['tokens.colorBrandBackground'], isVariableReference: true, }), expect.objectContaining({ - token: 'tokens.colorNeutralBackground1', + token: ['tokens.colorNeutralBackground1'], isVariableReference: true, }), ]) @@ -280,30 +280,30 @@ describe('CSS Variable Cross-Module Resolution E2E', () => { // Direct import of token expect.objectContaining({ property: 'color', - token: 'tokens.colorBrandPrimary', + token: ['tokens.colorBrandPrimary'], isVariableReference: true, }), // Import of CSS var with token expect.objectContaining({ property: 'backgroundColor', - token: 'tokens.colorBrandPrimary', + token: ['tokens.colorBrandPrimary'], isVariableReference: true, }), // Import of nested CSS var with token expect.objectContaining({ property: 'border', - token: 'tokens.colorBrandSecondary', + token: ['tokens.colorBrandSecondary'], isVariableReference: true, }), // Multiple tokens from a complex var expect.objectContaining({ property: 'padding', - token: 'tokens.colorBrandPrimary', + token: ['tokens.colorBrandPrimary'], isVariableReference: true, }), expect.objectContaining({ property: 'padding', - token: 'tokens.colorBrandSecondary', + token: ['tokens.colorBrandSecondary'], isVariableReference: true, }), ]) diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index 06a53775d..2ab07fc18 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -54,7 +54,8 @@ describe('e2e test', () => { const tokenTestFactory = (tokenArray: any) => { return (propertyName: string, expectedToken: string) => { const token = tokenArray.some( - (t: any) => t.property === propertyName && t.token === expectedToken + (t: any) => + t.property === propertyName && t.token.includes(expectedToken) ); expect(token).toBeTruthy(); }; diff --git a/packages/token-analyzer/src/__tests__/test-files/analysis.json b/packages/token-analyzer/src/__tests__/test-files/analysis.json index 66d0800f7..7f5cbc27f 100644 --- a/packages/token-analyzer/src/__tests__/test-files/analysis.json +++ b/packages/token-analyzer/src/__tests__/test-files/analysis.json @@ -6,62 +6,62 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorNeutralBackground1", + "token": ["tokens.colorNeutralBackground1"], "path": ["backgroundColor"] }, { "property": 
"color", - "token": "tokens.colorNeutralForeground1", + "token": ["tokens.colorNeutralForeground1"], "path": ["color"] }, { "property": "border", - "token": "tokens.strokeWidthThin", + "token": ["tokens.strokeWidthThin"], "path": ["border"] }, { "property": "border", - "token": "tokens.colorNeutralStroke1", + "token": ["tokens.colorNeutralStroke1"], "path": ["border"] }, { "property": "fontFamily", - "token": "tokens.fontFamilyBase", + "token": ["tokens.fontFamilyBase"], "path": ["fontFamily"] }, { "property": "padding", - "token": "tokens.spacingHorizontalM", + "token": ["tokens.spacingHorizontalM"], "path": ["padding"] }, { "property": "borderRadius", - "token": "tokens.borderRadiusMedium", + "token": ["tokens.borderRadiusMedium"], "path": ["borderRadius"] }, { "property": "fontSize", - "token": "tokens.fontSizeBase300", + "token": ["tokens.fontSizeBase300"], "path": ["fontSize"] }, { "property": "fontWeight", - "token": "tokens.fontWeightSemibold", + "token": ["tokens.fontWeightSemibold"], "path": ["fontWeight"] }, { "property": "lineHeight", - "token": "tokens.lineHeightBase300", + "token": ["tokens.lineHeightBase300"], "path": ["lineHeight"] }, { "property": "transitionDuration", - "token": "tokens.durationFaster", + "token": ["tokens.durationFaster"], "path": ["transitionDuration"] }, { "property": "transitionTimingFunction", - "token": "tokens.curveEasyEase", + "token": ["tokens.curveEasyEase"], "path": ["transitionTimingFunction"] } ], @@ -70,17 +70,17 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorNeutralBackground1Hover", + "token": ["tokens.colorNeutralBackground1Hover"], "path": ["':hover'", "backgroundColor"] }, { "property": "borderColor", - "token": "tokens.colorNeutralStroke1Hover", + "token": ["tokens.colorNeutralStroke1Hover"], "path": ["':hover'", "borderColor"] }, { "property": "color", - "token": "tokens.colorNeutralForeground1Hover", + "token": ["tokens.colorNeutralForeground1Hover"], "path": ["':hover'", "color"] } ] @@ -89,17 +89,17 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorNeutralBackground1Pressed", + "token": ["tokens.colorNeutralBackground1Pressed"], "path": ["':hover:active'", "backgroundColor"] }, { "property": "borderColor", - "token": "tokens.colorNeutralStroke1Pressed", + "token": ["tokens.colorNeutralStroke1Pressed"], "path": ["':hover:active'", "borderColor"] }, { "property": "color", - "token": "tokens.colorNeutralForeground1Pressed", + "token": ["tokens.colorNeutralForeground1Pressed"], "path": ["':hover:active'", "color"] } ] @@ -108,32 +108,32 @@ "tokens": [ { "property": "borderColor", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": [":focus", "borderColor"] }, { "property": "borderRadius", - "token": "tokens.borderRadiusMedium", + "token": ["tokens.borderRadiusMedium"], "path": [":focus", "borderRadius"] }, { "property": "outline", - "token": "tokens.strokeWidthThick", + "token": ["tokens.strokeWidthThick"], "path": [":focus", "outline"] }, { "property": "outline", - "token": "tokens.colorTransparentStroke", + "token": ["tokens.colorTransparentStroke"], "path": [":focus", "outline"] }, { "property": "boxShadow", - "token": "tokens.strokeWidthThin", + "token": ["tokens.strokeWidthThin"], "path": [":focus", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": [":focus", "boxShadow"] } ] @@ -145,12 +145,12 @@ "tokens": [ { "property": "boxShadow", - "token": 
"tokens.strokeWidthThin", + "token": ["tokens.strokeWidthThin"], "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] } ] @@ -167,7 +167,7 @@ "tokens": [ { "property": "[iconSpacingVar]", - "token": "tokens.spacingHorizontalSNudge", + "token": ["tokens.spacingHorizontalSNudge"], "path": ["[iconSpacingVar]"] } ], @@ -181,7 +181,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["backgroundColor"] } ], @@ -190,7 +190,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackgroundHover", + "token": ["tokens.colorTransparentBackgroundHover"], "path": ["':hover'", "backgroundColor"] } ] @@ -199,7 +199,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackgroundPressed", + "token": ["tokens.colorTransparentBackgroundPressed"], "path": ["':hover:active'", "backgroundColor"] } ] @@ -211,12 +211,12 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorBrandBackground", + "token": ["tokens.colorBrandBackground"], "path": ["backgroundColor"] }, { "property": "color", - "token": "tokens.colorNeutralForegroundOnBrand", + "token": ["tokens.colorNeutralForegroundOnBrand"], "path": ["color"] } ], @@ -225,12 +225,12 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorBrandBackgroundHover", + "token": ["tokens.colorBrandBackgroundHover"], "path": ["':hover'", "backgroundColor"] }, { "property": "color", - "token": "tokens.colorNeutralForegroundOnBrand", + "token": ["tokens.colorNeutralForegroundOnBrand"], "path": ["':hover'", "color"] } ] @@ -239,12 +239,12 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorBrandBackgroundPressed", + "token": ["tokens.colorBrandBackgroundPressed"], "path": ["':hover:active'", "backgroundColor"] }, { "property": "color", - "token": "tokens.colorNeutralForegroundOnBrand", + "token": ["tokens.colorNeutralForegroundOnBrand"], "path": ["':hover:active'", "color"] } ] @@ -256,12 +256,12 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorSubtleBackground", + "token": ["tokens.colorSubtleBackground"], "path": ["backgroundColor"] }, { "property": "color", - "token": "tokens.colorNeutralForeground2", + "token": ["tokens.colorNeutralForeground2"], "path": ["color"] } ], @@ -270,12 +270,12 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorSubtleBackgroundHover", + "token": ["tokens.colorSubtleBackgroundHover"], "path": ["':hover'", "backgroundColor"] }, { "property": "color", - "token": "tokens.colorNeutralForeground2Hover", + "token": ["tokens.colorNeutralForeground2Hover"], "path": ["':hover'", "color"] } ], @@ -284,7 +284,7 @@ "tokens": [ { "property": "color", - "token": "tokens.colorNeutralForeground2BrandHover", + "token": ["tokens.colorNeutralForeground2BrandHover"], "path": ["':hover'", "[`& .${buttonClassNames.icon}`]", "color"] } ] @@ -295,12 +295,12 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorSubtleBackgroundPressed", + "token": ["tokens.colorSubtleBackgroundPressed"], "path": ["':hover:active'", "backgroundColor"] }, { "property": "color", - "token": "tokens.colorNeutralForeground2Pressed", + "token": ["tokens.colorNeutralForeground2Pressed"], "path": ["':hover:active'", "color"] 
} ], @@ -309,7 +309,7 @@ "tokens": [ { "property": "color", - "token": "tokens.colorNeutralForeground2BrandPressed", + "token": ["tokens.colorNeutralForeground2BrandPressed"], "path": ["':hover:active'", "[`& .${buttonClassNames.icon}`]", "color"] } ] @@ -323,12 +323,12 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["backgroundColor"] }, { "property": "color", - "token": "tokens.colorNeutralForeground2", + "token": ["tokens.colorNeutralForeground2"], "path": ["color"] } ], @@ -337,12 +337,12 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackgroundHover", + "token": ["tokens.colorTransparentBackgroundHover"], "path": ["':hover'", "backgroundColor"] }, { "property": "color", - "token": "tokens.colorNeutralForeground2BrandHover", + "token": ["tokens.colorNeutralForeground2BrandHover"], "path": ["':hover'", "color"] } ] @@ -351,12 +351,12 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackgroundPressed", + "token": ["tokens.colorTransparentBackgroundPressed"], "path": ["':hover:active'", "backgroundColor"] }, { "property": "color", - "token": "tokens.colorNeutralForeground2BrandPressed", + "token": ["tokens.colorNeutralForeground2BrandPressed"], "path": ["':hover:active'", "color"] } ] @@ -368,7 +368,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["'@media (forced-colors: active)'", "':hover'", "backgroundColor"] } ] @@ -377,7 +377,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["'@media (forced-colors: active)'", "':hover:active'", "backgroundColor"] } ] @@ -391,7 +391,7 @@ "tokens": [ { "property": "borderRadius", - "token": "tokens.borderRadiusCircular", + "token": ["tokens.borderRadiusCircular"], "path": ["borderRadius"] } ], @@ -401,7 +401,7 @@ "tokens": [ { "property": "borderRadius", - "token": "tokens.borderRadiusNone", + "token": ["tokens.borderRadiusNone"], "path": ["borderRadius"] } ], @@ -411,27 +411,27 @@ "tokens": [ { "property": "padding", - "token": "tokens.spacingHorizontalS", + "token": ["tokens.spacingHorizontalS"], "path": ["padding"] }, { "property": "borderRadius", - "token": "tokens.borderRadiusMedium", + "token": ["tokens.borderRadiusMedium"], "path": ["borderRadius"] }, { "property": "fontSize", - "token": "tokens.fontSizeBase200", + "token": ["tokens.fontSizeBase200"], "path": ["fontSize"] }, { "property": "fontWeight", - "token": "tokens.fontWeightRegular", + "token": ["tokens.fontWeightRegular"], "path": ["fontWeight"] }, { "property": "lineHeight", - "token": "tokens.lineHeightBase200", + "token": ["tokens.lineHeightBase200"], "path": ["lineHeight"] } ], @@ -441,27 +441,27 @@ "tokens": [ { "property": "padding", - "token": "tokens.spacingHorizontalL", + "token": ["tokens.spacingHorizontalL"], "path": ["padding"] }, { "property": "borderRadius", - "token": "tokens.borderRadiusMedium", + "token": ["tokens.borderRadiusMedium"], "path": ["borderRadius"] }, { "property": "fontSize", - "token": "tokens.fontSizeBase400", + "token": ["tokens.fontSizeBase400"], "path": ["fontSize"] }, { "property": "fontWeight", - "token": "tokens.fontWeightSemibold", + "token": ["tokens.fontWeightSemibold"], "path": ["fontWeight"] }, { "property": "lineHeight", - "token": 
"tokens.lineHeightBase400", + "token": ["tokens.lineHeightBase400"], "path": ["lineHeight"] } ], @@ -473,32 +473,32 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorNeutralBackgroundDisabled", + "token": ["tokens.colorNeutralBackgroundDisabled"], "path": ["backgroundColor"] }, { "property": "borderTopColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": ["borderTopColor"] }, { "property": "borderRightColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": ["borderRightColor"] }, { "property": "borderBottomColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": ["borderBottomColor"] }, { "property": "borderLeftColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": ["borderLeftColor"] }, { "property": "color", - "token": "tokens.colorNeutralForegroundDisabled", + "token": ["tokens.colorNeutralForegroundDisabled"], "path": ["color"] } ], @@ -507,7 +507,7 @@ "tokens": [ { "property": "color", - "token": "tokens.colorNeutralForegroundDisabled", + "token": ["tokens.colorNeutralForegroundDisabled"], "path": ["[`& .${buttonClassNames.icon}`]", "color"] } ] @@ -516,32 +516,32 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorNeutralBackgroundDisabled", + "token": ["tokens.colorNeutralBackgroundDisabled"], "path": ["':hover'", "backgroundColor"] }, { "property": "borderTopColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": ["':hover'", "borderTopColor"] }, { "property": "borderRightColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": ["':hover'", "borderRightColor"] }, { "property": "borderBottomColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": ["':hover'", "borderBottomColor"] }, { "property": "borderLeftColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": ["':hover'", "borderLeftColor"] }, { "property": "color", - "token": "tokens.colorNeutralForegroundDisabled", + "token": ["tokens.colorNeutralForegroundDisabled"], "path": ["':hover'", "color"] } ], @@ -550,7 +550,7 @@ "tokens": [ { "property": "color", - "token": "tokens.colorNeutralForegroundDisabled", + "token": ["tokens.colorNeutralForegroundDisabled"], "path": ["':hover'", "[`& .${buttonClassNames.icon}`]", "color"] } ] @@ -561,32 +561,32 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorNeutralBackgroundDisabled", + "token": ["tokens.colorNeutralBackgroundDisabled"], "path": ["':hover:active'", "backgroundColor"] }, { "property": "borderTopColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": ["':hover:active'", "borderTopColor"] }, { "property": "borderRightColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": ["':hover:active'", "borderRightColor"] }, { "property": "borderBottomColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": ["':hover:active'", "borderBottomColor"] }, { "property": "borderLeftColor", - "token": "tokens.colorNeutralStrokeDisabled", + "token": ["tokens.colorNeutralStrokeDisabled"], "path": 
["':hover:active'", "borderLeftColor"] }, { "property": "color", - "token": "tokens.colorNeutralForegroundDisabled", + "token": ["tokens.colorNeutralForegroundDisabled"], "path": ["':hover:active'", "color"] } ], @@ -595,7 +595,7 @@ "tokens": [ { "property": "color", - "token": "tokens.colorNeutralForegroundDisabled", + "token": ["tokens.colorNeutralForegroundDisabled"], "path": ["':hover:active'", "[`& .${buttonClassNames.icon}`]", "color"] } ] @@ -609,7 +609,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["backgroundColor"] } ], @@ -618,7 +618,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["':hover'", "backgroundColor"] } ] @@ -627,7 +627,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["':hover:active'", "backgroundColor"] } ] @@ -639,7 +639,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["backgroundColor"] } ], @@ -648,7 +648,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["':hover'", "backgroundColor"] } ] @@ -657,7 +657,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["':hover:active'", "backgroundColor"] } ] @@ -669,7 +669,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["backgroundColor"] } ], @@ -678,7 +678,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["':hover'", "backgroundColor"] } ] @@ -687,7 +687,7 @@ "tokens": [ { "property": "backgroundColor", - "token": "tokens.colorTransparentBackground", + "token": ["tokens.colorTransparentBackground"], "path": ["':hover:active'", "backgroundColor"] } ] @@ -704,7 +704,7 @@ "tokens": [ { "property": "borderRadius", - "token": "tokens.borderRadiusCircular", + "token": ["tokens.borderRadiusCircular"], "path": [":focus", "borderRadius"] } ] @@ -719,7 +719,7 @@ "tokens": [ { "property": "borderRadius", - "token": "tokens.borderRadiusNone", + "token": ["tokens.borderRadiusNone"], "path": [":focus", "borderRadius"] } ] @@ -734,47 +734,47 @@ "tokens": [ { "property": "borderTopColor", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": [":focus", "borderTopColor"] }, { "property": "borderRightColor", - "token": "tokens.colorStrokeFocus1", + "token": ["tokens.colorStrokeFocus1"], "path": [":focus", "borderRightColor"] }, { "property": "borderBottomColor", - "token": "tokens.borderRadiusCircular", + "token": ["tokens.borderRadiusCircular"], "path": [":focus", "borderBottomColor"] }, { "property": "borderLeftColor", - "token": "tokens.colorStrokeFocus1", + "token": ["tokens.colorStrokeFocus1"], "path": [":focus", "borderLeftColor"] }, { "property": "boxShadow", - "token": "tokens.shadow2", + "token": ["tokens.shadow2"], "path": [":focus", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.strokeWidthThin", + "token": ["tokens.strokeWidthThin"], 
"path": [":focus", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": [":focus", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.strokeWidthThick", + "token": ["tokens.strokeWidthThick"], "path": [":focus", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.colorNeutralForegroundOnBrand", + "token": ["tokens.colorNeutralForegroundOnBrand"], "path": [":focus", "boxShadow"] } ], @@ -783,37 +783,37 @@ "tokens": [ { "property": "boxShadow", - "token": "tokens.shadow2", + "token": ["tokens.shadow2"], "path": [":focus", "':hover'", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.strokeWidthThin", + "token": ["tokens.strokeWidthThin"], "path": [":focus", "':hover'", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": [":focus", "':hover'", "boxShadow"] }, { "property": "borderTopColor", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": [":focus", "':hover'", "borderTopColor"] }, { "property": "borderRightColor", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": [":focus", "':hover'", "borderRightColor"] }, { "property": "borderBottomColor", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": [":focus", "':hover'", "borderBottomColor"] }, { "property": "borderLeftColor", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": [":focus", "':hover'", "borderLeftColor"] } ] @@ -827,27 +827,27 @@ "tokens": [ { "property": "boxShadow", - "token": "tokens.shadow2", + "token": ["tokens.shadow2"], "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.strokeWidthThin", + "token": ["tokens.strokeWidthThin"], "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.strokeWidthThick", + "token": ["tokens.strokeWidthThick"], "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.colorNeutralForegroundOnBrand", + "token": ["tokens.colorNeutralForegroundOnBrand"], "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] } ], @@ -856,17 +856,17 @@ "tokens": [ { "property": "boxShadow", - "token": "tokens.shadow2", + "token": ["tokens.shadow2"], "path": ["'@supports (-moz-appearance:button)'", ":focus", "':hover'", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.strokeWidthThin", + "token": ["tokens.strokeWidthThin"], "path": ["'@supports (-moz-appearance:button)'", ":focus", "':hover'", "boxShadow"] }, { "property": "boxShadow", - "token": "tokens.colorStrokeFocus2", + "token": ["tokens.colorStrokeFocus2"], "path": ["'@supports (-moz-appearance:button)'", ":focus", "':hover'", "boxShadow"] } ] @@ -885,7 +885,7 @@ "tokens": [ { "property": "borderRadius", - "token": "tokens.borderRadiusSmall", + "token": ["tokens.borderRadiusSmall"], "path": [":focus", "borderRadius"] } ] @@ -900,7 +900,7 @@ "tokens": [ { "property": "borderRadius", - "token": "tokens.borderRadiusLarge", + "token": ["tokens.borderRadiusLarge"], "path": [":focus", "borderRadius"] } ] @@ -915,7 +915,7 @@ "tokens": 
[ { "property": "[iconSpacingVar]", - "token": "tokens.spacingHorizontalXS", + "token": ["tokens.spacingHorizontalXS"], "path": ["[iconSpacingVar]"] } ], @@ -925,7 +925,7 @@ "tokens": [ { "property": "[iconSpacingVar]", - "token": "tokens.spacingHorizontalSNudge", + "token": ["tokens.spacingHorizontalSNudge"], "path": ["[iconSpacingVar]"] } ], diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index d446d9594..b340fced3 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -23,6 +23,7 @@ import { } from './importAnalyzer.js'; import { extractTokensFromCssVars } from './cssVarTokenExtractor.js'; import { + addTokenToArray, extractTokensFromText, getPropertiesForShorthand, isTokenReference, @@ -55,7 +56,7 @@ function processStyleProperty( importedValues: Map | undefined = undefined, isResetStyles?: boolean ): TokenReference[] { - const tokens: TokenReference[] = []; + let tokens: TokenReference[] = []; const parentName = Node.isPropertyAssignment(prop) ? prop.getName() : ''; function processNode(node?: Node, path: string[] = []): void { @@ -80,17 +81,20 @@ function processStyleProperty( path, TOKEN_REGEX ); - tokens.push(...cssVarTokens); + tokens = addTokenToArray(cssVarTokens, tokens); } else { // Check for direct token references const matches = extractTokensFromText(node); if (matches.length > 0) { matches.forEach((match) => { - tokens.push({ - property: path[path.length - 1] || parentName, - token: match, - path, - }); + tokens = addTokenToArray( + { + property: path[path.length - 1] || parentName, + token: [match], + path, + }, + tokens + ); }); } } @@ -101,11 +105,14 @@ function processStyleProperty( const matches = extractTokensFromText(node); if (matches.length > 0) { matches.forEach((match) => { - tokens.push({ - property: path[path.length - 1] || parentName, - token: match, - path, - }); + tokens = addTokenToArray( + { + property: path[path.length - 1] || parentName, + token: [match], + path, + }, + tokens + ); }); } @@ -124,11 +131,14 @@ function processStyleProperty( const text = node.getText(); const isToken = isTokenReference(text); if (isToken) { - tokens.push({ - property: path[path.length - 1] || parentName, - token: text, - path, - }); + tokens = addTokenToArray( + { + property: path[path.length - 1] || parentName, + token: [text], + path, + }, + tokens + ); } } else if (Node.isObjectLiteralExpression(node)) { node.getProperties().forEach((childProp) => { @@ -203,11 +213,14 @@ function processStyleProperty( if (affectedProperties.length > 0) { // Process each argument and apply it to all affected properties affectedProperties.forEach((argument) => { - tokens.push({ - property: argument.property, - token: argument.token, - path: path.concat(argument.property), - }); + tokens = addTokenToArray( + { + property: argument.property, + token: [argument.token], + path: path.concat(argument.property), + }, + tokens + ); }); } else { // Generic handling of functions that are not whitelisted diff --git a/packages/token-analyzer/src/cssVarTokenExtractor.ts b/packages/token-analyzer/src/cssVarTokenExtractor.ts index ed8753e92..1c75ad1c5 100644 --- a/packages/token-analyzer/src/cssVarTokenExtractor.ts +++ b/packages/token-analyzer/src/cssVarTokenExtractor.ts @@ -1,7 +1,7 @@ // cssVarTokenExtractor.ts import { log } from './debugUtils.js'; import { TokenReference } from './types.js'; -import { extractTokensFromText } from './tokenUtils.js'; +import { addTokenToArray, 
extractTokensFromText } from './tokenUtils.js'; /** * Extracts token references from CSS variable syntax including nested fallback chains @@ -17,22 +17,25 @@ export function extractTokensFromCssVars( value: string, propertyName: string, path: string[] = [], - TOKEN_REGEX: RegExp, + TOKEN_REGEX: RegExp ): TokenReference[] { - const tokens: TokenReference[] = []; + let tokens: TokenReference[] = []; let testValue = value; // Direct token matches in the string const directMatches = extractTokensFromText(testValue); if (directMatches.length > 0) { - directMatches.forEach(match => { + directMatches.forEach((match) => { testValue = testValue.replace(match, ''); // Remove direct matches from the string - tokens.push({ - property: propertyName, - token: match, - path, - }); + tokens = addTokenToArray( + { + property: propertyName, + token: [match], + path, + }, + tokens + ); }); } @@ -55,12 +58,15 @@ export function extractTokensFromCssVars( // Check if the variable name contains a token reference const varNameTokens = extractTokensFromText(varName); if (varNameTokens.length > 0) { - varNameTokens.forEach(token => { - tokens.push({ - property: propertyName, - token, - path, - }); + varNameTokens.forEach((token) => { + tokens = addTokenToArray( + { + property: propertyName, + token: [token], + path, + }, + tokens + ); }); } @@ -68,18 +74,26 @@ export function extractTokensFromCssVars( if (fallback) { // Recursively process the fallback if (fallback.includes('var(')) { - const fallbackTokens = extractTokensFromCssVars(fallback, propertyName, path, TOKEN_REGEX); + const fallbackTokens = extractTokensFromCssVars( + fallback, + propertyName, + path, + TOKEN_REGEX + ); tokens.push(...fallbackTokens); } else { // Check for direct token references in the fallback const fallbackTokens = extractTokensFromText(fallback); if (fallbackTokens.length > 0) { - fallbackTokens.forEach(token => { - tokens.push({ - property: propertyName, - token, - path, - }); + fallbackTokens.forEach((token) => { + tokens = addTokenToArray( + { + property: propertyName, + token: [token], + path, + }, + tokens + ); }); } } diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index bd76e6e9a..930488adb 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -1,10 +1,22 @@ // importAnalyzer.ts -import { Project, Node, SourceFile, ImportDeclaration, Symbol, TypeChecker, SyntaxKind } from 'ts-morph'; +import { + Project, + Node, + SourceFile, + ImportDeclaration, + Symbol, + TypeChecker, + SyntaxKind, +} from 'ts-morph'; import { log } from './debugUtils.js'; import { TokenReference, TOKEN_REGEX } from './types.js'; import { getModuleSourceFile } from './moduleResolver.js'; import { extractTokensFromCssVars } from './cssVarTokenExtractor.js'; -import { isTokenReference, extractTokensFromText } from './tokenUtils.js'; +import { + isTokenReference, + extractTokensFromText, + addTokenToArray, +} from './tokenUtils.js'; /** * Represents a portion of a template expression @@ -32,7 +44,10 @@ export interface ImportedValue { /** * Analyzes imports in a source file to extract string values */ -export async function analyzeImports(sourceFile: SourceFile, project: Project): Promise> { +export async function analyzeImports( + sourceFile: SourceFile, + project: Project +): Promise> { const importedValues = new Map(); const filePath = sourceFile.getFilePath(); @@ -45,9 +60,18 @@ export async function analyzeImports(sourceFile: SourceFile, 
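For illustration only, not part of the patch series: given the extractTokensFromCssVars hunk above, a nested var() fallback chain should produce one TokenReference per token it can match, with direct matches recorded first and fallbacks resolved recursively. The style value, property and path below are made-up inputs, and the four-argument signature is the one shown here (a later patch in this series drops the TOKEN_REGEX parameter).

import { extractTokensFromCssVars } from './cssVarTokenExtractor.js';
import { TOKEN_REGEX } from './types.js';

// Hypothetical value with a nested fallback chain ending in a token reference.
const refs = extractTokensFromCssVars(
  'var(--ctrl-bg, var(--brand-bg, tokens.colorBrandBackground))',
  'backgroundColor',
  ["':hover'"],
  TOKEN_REGEX
);
// refs ≈ [{ property: 'backgroundColor', token: ['tokens.colorBrandBackground'], path: ["':hover'"] }]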
project: Project): for (const importDecl of sourceFile.getImportDeclarations()) { try { // Process the import declaration - await processImportDeclaration(importDecl, sourceFile, project, importedValues, typeChecker); + await processImportDeclaration( + importDecl, + sourceFile, + project, + importedValues, + typeChecker + ); } catch (err) { - log(`Error processing import: ${importDecl.getModuleSpecifierValue()}`, err); + log( + `Error processing import: ${importDecl.getModuleSpecifierValue()}`, + err + ); } } @@ -62,13 +86,17 @@ async function processImportDeclaration( sourceFile: SourceFile, project: Project, importedValues: Map, - typeChecker: TypeChecker, + typeChecker: TypeChecker ): Promise { const moduleSpecifier = importDecl.getModuleSpecifierValue(); const containingFilePath = sourceFile.getFilePath(); // Use our module resolver to get the imported file - const importedFile = getModuleSourceFile(project, moduleSpecifier, containingFilePath); + const importedFile = getModuleSourceFile( + project, + moduleSpecifier, + containingFilePath + ); if (!importedFile) { log(`Could not resolve module: ${moduleSpecifier}`); @@ -76,10 +104,22 @@ async function processImportDeclaration( } // Process named imports (import { x } from 'module') - processNamedImports(importDecl, importedFile, project, importedValues, typeChecker); + processNamedImports( + importDecl, + importedFile, + project, + importedValues, + typeChecker + ); // Process default import (import x from 'module') - processDefaultImport(importDecl, importedFile, project, importedValues, typeChecker); + processDefaultImport( + importDecl, + importedFile, + project, + importedValues, + typeChecker + ); } /** @@ -90,14 +130,18 @@ function processNamedImports( importedFile: SourceFile, project: Project, importedValues: Map, - typeChecker: TypeChecker, + typeChecker: TypeChecker ): void { for (const namedImport of importDecl.getNamedImports()) { const importName = namedImport.getName(); const alias = namedImport.getAliasNode()?.getText() || importName; // Find the export's true source using TypeScript's type checker - const exportInfo = findExportDeclaration(importedFile, importName, typeChecker); + const exportInfo = findExportDeclaration( + importedFile, + importName, + typeChecker + ); if (exportInfo) { const { declaration, sourceFile: declarationFile } = exportInfo; @@ -113,7 +157,11 @@ function processNamedImports( templateSpans: valueInfo.templateSpans, }); - log(`Added imported value: ${alias} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); + log( + `Added imported value: ${alias} = ${ + valueInfo.value + } from ${declarationFile.getFilePath()}` + ); } } } @@ -127,7 +175,7 @@ function processDefaultImport( importedFile: SourceFile, project: Project, importedValues: Map, - typeChecker: TypeChecker, + typeChecker: TypeChecker ): void { const defaultImport = importDecl.getDefaultImport(); if (!defaultImport) { @@ -137,7 +185,11 @@ function processDefaultImport( const importName = defaultImport.getText(); // Find the default export's true source - const exportInfo = findExportDeclaration(importedFile, 'default', typeChecker); + const exportInfo = findExportDeclaration( + importedFile, + 'default', + typeChecker + ); if (exportInfo) { const { declaration, sourceFile: declarationFile } = exportInfo; @@ -153,7 +205,11 @@ function processDefaultImport( templateSpans: valueInfo.templateSpans, }); - log(`Added default import: ${importName} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); + log( + `Added default 
import: ${importName} = ${ + valueInfo.value + } from ${declarationFile.getFilePath()}` + ); } } } @@ -164,7 +220,7 @@ function processDefaultImport( function findExportDeclaration( sourceFile: SourceFile, exportName: string, - typeChecker: TypeChecker, + typeChecker: TypeChecker ): { declaration: Node; sourceFile: SourceFile } | undefined { try { // Get the source file's symbol (represents the module) @@ -182,9 +238,13 @@ function findExportDeclaration( } // Find the specific export we're looking for - const exportSymbol = exports.find((symbol: Symbol) => symbol.getName() === exportName); + const exportSymbol = exports.find( + (symbol: Symbol) => symbol.getName() === exportName + ); if (!exportSymbol) { - log(`Export symbol '${exportName}' not found in ${sourceFile.getFilePath()}`); + log( + `Export symbol '${exportName}' not found in ${sourceFile.getFilePath()}` + ); return undefined; } @@ -220,7 +280,7 @@ function findExportDeclaration( const declarationSourceFile = valueDeclaration.getSourceFile(); log( - `Found declaration for '${exportName}': ${valueDeclaration.getKindName()} in ${declarationSourceFile.getFilePath()}`, + `Found declaration for '${exportName}': ${valueDeclaration.getKindName()} in ${declarationSourceFile.getFilePath()}` ); return { declaration: valueDeclaration, @@ -237,8 +297,10 @@ function findExportDeclaration( */ function extractValueFromDeclaration( declaration: Node, - typeChecker: TypeChecker, -): { value: string; isLiteral: boolean; templateSpans?: TemplateSpan[] } | undefined { + typeChecker: TypeChecker +): + | { value: string; isLiteral: boolean; templateSpans?: TemplateSpan[] } + | undefined { // Handle variable declarations if (Node.isVariableDeclaration(declaration)) { const initializer = declaration.getInitializer(); @@ -257,7 +319,9 @@ function extractValueFromDeclaration( const sourceFile = declaration.getSourceFile(); // Find the local declaration with this name - for (const varDecl of sourceFile.getDescendantsOfKind(SyntaxKind.VariableDeclaration)) { + for (const varDecl of sourceFile.getDescendantsOfKind( + SyntaxKind.VariableDeclaration + )) { if (varDecl.getName() === name) { const initializer = varDecl.getInitializer(); return extractValueFromExpression(initializer, typeChecker); @@ -273,7 +337,7 @@ function extractValueFromDeclaration( */ function extractValueFromExpression( expression: Node | undefined, - typeChecker: TypeChecker, + typeChecker: TypeChecker ): | { value: string; @@ -314,7 +378,10 @@ function extractValueFromExpression( const literal = span.getLiteral().getLiteralText(); // Handle different types of expressions in template spans - if (Node.isPropertyAccessExpression(spanExpr) && isTokenReference(spanExpr)) { + if ( + Node.isPropertyAccessExpression(spanExpr) && + isTokenReference(spanExpr) + ) { // Direct token reference in template span templateSpans.push({ text: spanText, @@ -431,9 +498,9 @@ export function processImportedStringTokens( propertyName: string, value: string, path: string[] = [], - tokenRegex: RegExp = TOKEN_REGEX, + tokenRegex: RegExp = TOKEN_REGEX ): TokenReference[] { - const tokens: TokenReference[] = []; + let tokens: TokenReference[] = []; // Check if the value is an imported value reference if (importedValues.has(value)) { @@ -441,7 +508,7 @@ export function processImportedStringTokens( // If we've already pre-resolved tokens for this value, use them if (importedValue.resolvedTokens) { - return importedValue.resolvedTokens.map(token => ({ + return importedValue.resolvedTokens.map((token) => ({ ...token, 
property: propertyName, // Update property name for current context path: path, // Update path for current context @@ -454,27 +521,39 @@ export function processImportedStringTokens( for (const span of importedValue.templateSpans) { if (span.isToken) { // Direct token reference in span - tokens.push({ - property: propertyName, - token: span.text, - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); - } else if (span.isReference && span.referenceName && importedValues.has(span.referenceName)) { + tokens = addTokenToArray( + { + property: propertyName, + token: [span.text], + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }, + tokens + ); + } else if ( + span.isReference && + span.referenceName && + importedValues.has(span.referenceName) + ) { // Reference to another imported value - process recursively const spanTokens = processImportedStringTokens( importedValues, propertyName, span.referenceName, path, - tokenRegex, + tokenRegex ); tokens.push(...spanTokens); } else if (span.text.includes('var(')) { // Check for CSS variables in the span text - const cssVarTokens = extractTokensFromCssVars(span.text, propertyName, path, tokenRegex); - cssVarTokens.forEach(token => { + const cssVarTokens = extractTokensFromCssVars( + span.text, + propertyName, + path, + tokenRegex + ); + cssVarTokens.forEach((token) => { tokens.push({ ...token, isVariableReference: true, @@ -485,14 +564,17 @@ export function processImportedStringTokens( // Check for direct token matches in non-reference spans const matches = extractTokensFromText(span.text); if (matches.length > 0) { - matches.forEach(match => { - tokens.push({ - property: propertyName, - token: match, - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); + matches.forEach((match) => { + tokens = addTokenToArray( + { + property: propertyName, + token: [match], + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }, + tokens + ); }); } } @@ -502,19 +584,27 @@ export function processImportedStringTokens( // First, check for direct token references const matches = extractTokensFromText(importedValue.value); if (matches.length > 0) { - matches.forEach(match => { - tokens.push({ - property: propertyName, - token: match, - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); + matches.forEach((match) => { + tokens = addTokenToArray( + { + property: propertyName, + token: [match], + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }, + tokens + ); }); } else if (importedValue.value.includes('var(')) { // Then check for CSS variable patterns - const cssVarTokens = extractTokensFromCssVars(importedValue.value, propertyName, path, tokenRegex); - cssVarTokens.forEach(token => { + const cssVarTokens = extractTokensFromCssVars( + importedValue.value, + propertyName, + path, + tokenRegex + ); + cssVarTokens.forEach((token) => { tokens.push({ ...token, isVariableReference: true, @@ -526,32 +616,38 @@ export function processImportedStringTokens( } else { // Non-literal values (like property access expressions) if (isTokenReference(importedValue.value)) { - tokens.push({ - property: propertyName, - token: importedValue.value, - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); + tokens = addTokenToArray( + { + property: propertyName, + token: [importedValue.value], + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }, + tokens + ); } else { // 
Check for any token references in the value const matches = extractTokensFromText(importedValue.value); if (matches.length > 0) { - matches.forEach(match => { - tokens.push({ - property: propertyName, - token: match, - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); + matches.forEach((match) => { + tokens = addTokenToArray( + { + property: propertyName, + token: [match], + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }, + tokens + ); }); } } } // Cache the resolved tokens for future use - importedValue.resolvedTokens = tokens.map(token => ({ ...token })); + importedValue.resolvedTokens = tokens.map((token) => ({ ...token })); } return tokens; diff --git a/packages/token-analyzer/src/tokenUtils.ts b/packages/token-analyzer/src/tokenUtils.ts index e129e4e6b..f1fef55e1 100644 --- a/packages/token-analyzer/src/tokenUtils.ts +++ b/packages/token-analyzer/src/tokenUtils.ts @@ -168,13 +168,12 @@ export const addTokenToArray = ( const newArray = target.slice(); // add items to the array - // We should probably search the tokens array for matches or duplicates and then determine if we need to add them or update existing entries. - // TODO we also need to update the token member within the TokenRefernece object to be an array. if (Array.isArray(tokensToAdd)) { - target.push(...tokensToAdd); + newArray.push(...tokensToAdd); } else { - target.push(tokensToAdd); + newArray.push(tokensToAdd); } - return target; - // return arrayy without modifying the original array + + // return array without modifying the original array + return newArray; }; From b5a49935d2ee5e0dc7dbe7510a5928581fbac027 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 22 Apr 2025 02:22:10 -0700 Subject: [PATCH 32/75] start of refactor token analysis path --- packages/token-analyzer/src/astAnalyzer.ts | 81 ++------ .../src/cssVarTokenExtractor.ts | 6 +- packages/token-analyzer/src/importAnalyzer.ts | 6 +- packages/token-analyzer/src/tokenResolver.ts | 174 ++++++++++++++++++ 4 files changed, 190 insertions(+), 77 deletions(-) create mode 100644 packages/token-analyzer/src/tokenResolver.ts diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index b340fced3..a00feaf12 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -12,22 +12,17 @@ import { StyleCondition, StyleContent, StyleMetadata, - TOKEN_REGEX, StyleTokens, } from './types.js'; import { log, measure, measureAsync } from './debugUtils.js'; -import { - analyzeImports, - processImportedStringTokens, - ImportedValue, -} from './importAnalyzer.js'; +import { analyzeImports, ImportedValue } from './importAnalyzer.js'; import { extractTokensFromCssVars } from './cssVarTokenExtractor.js'; import { addTokenToArray, - extractTokensFromText, getPropertiesForShorthand, isTokenReference, } from './tokenUtils.js'; +import { resolveToken } from './tokenResolver'; const makeResetStylesToken = 'resetStyles'; @@ -53,7 +48,7 @@ interface VariableMapping { */ function processStyleProperty( prop: PropertyAssignment | SpreadAssignment, - importedValues: Map | undefined = undefined, + importedValues: Map, isResetStyles?: boolean ): TokenReference[] { let tokens: TokenReference[] = []; @@ -70,63 +65,12 @@ function processStyleProperty( } // Check for string literals or template expressions (string template literals) - if (Node.isStringLiteral(node) || Node.isTemplateExpression(node)) { - const text = 
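For illustration only, not part of the patch series: the tokenUtils.ts hunk above makes addTokenToArray copy the target array before pushing, so the original array is never mutated and call sites have to reassign (tokens = addTokenToArray(...)), as the astAnalyzer changes above do. A minimal sketch, assuming only the TokenReference fields visible in the snapshot diffs:

import { addTokenToArray } from './tokenUtils.js';
import { TokenReference } from './types.js';

const existing: TokenReference[] = [];
const updated = addTokenToArray(
  { property: 'color', token: ['tokens.colorNeutralForeground1'], path: ['color'] },
  existing
);
// existing is still empty; updated holds the new entry, so only the return value is useful.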
node.getText().replace(/['"]/g, ''); // Remove quotes - - // Check for CSS var() syntax that might contain tokens - if (text.includes('var(')) { - const cssVarTokens = extractTokensFromCssVars( - text, - path[path.length - 1] || parentName, - path, - TOKEN_REGEX - ); - tokens = addTokenToArray(cssVarTokens, tokens); - } else { - // Check for direct token references - const matches = extractTokensFromText(node); - if (matches.length > 0) { - matches.forEach((match) => { - tokens = addTokenToArray( - { - property: path[path.length - 1] || parentName, - token: [match], - path, - }, - tokens - ); - }); - } - } - } else if (Node.isIdentifier(node)) { - const text = node.getText(); - - // First check if it matches the token regex directly - const matches = extractTokensFromText(node); - if (matches.length > 0) { - matches.forEach((match) => { - tokens = addTokenToArray( - { - property: path[path.length - 1] || parentName, - token: [match], - path, - }, - tokens - ); - }); - } - - // Then check if it's an imported value reference - if (importedValues && importedValues.has(text)) { - const importTokens = processImportedStringTokens( - importedValues, - path[path.length - 1] || parentName, - text, - path, - TOKEN_REGEX - ); - tokens.push(...importTokens); - } + if ( + Node.isStringLiteral(node) || + Node.isTemplateExpression(node) || + Node.isIdentifier(node) + ) { + tokens = resolveToken({ node, path, parentName, tokens, importedValues }); } else if (Node.isPropertyAccessExpression(node)) { const text = node.getText(); const isToken = isTokenReference(text); @@ -239,13 +183,12 @@ function processStyleProperty( } // Check for string literals in function arguments that might contain CSS variables with tokens if (Node.isStringLiteral(argument)) { - const text = argument.getText().replace(/['"]/g, ''); + const text = argument.getText(); if (text.includes('var(')) { const cssVarTokens = extractTokensFromCssVars( text, path[path.length - 1] || parentName, - [...path, functionName], - TOKEN_REGEX + [...path, functionName] ); tokens.push(...cssVarTokens); } @@ -446,7 +389,7 @@ function createMetadata(styleMappings: StyleMapping[]): StyleMetadata { */ async function analyzeMakeStyles( sourceFile: SourceFile, - importedValues: Map | undefined = undefined + importedValues: Map ): Promise { const analysis: StyleAnalysis = {}; diff --git a/packages/token-analyzer/src/cssVarTokenExtractor.ts b/packages/token-analyzer/src/cssVarTokenExtractor.ts index 1c75ad1c5..298e02065 100644 --- a/packages/token-analyzer/src/cssVarTokenExtractor.ts +++ b/packages/token-analyzer/src/cssVarTokenExtractor.ts @@ -16,8 +16,7 @@ import { addTokenToArray, extractTokensFromText } from './tokenUtils.js'; export function extractTokensFromCssVars( value: string, propertyName: string, - path: string[] = [], - TOKEN_REGEX: RegExp + path: string[] = [] ): TokenReference[] { let tokens: TokenReference[] = []; @@ -77,8 +76,7 @@ export function extractTokensFromCssVars( const fallbackTokens = extractTokensFromCssVars( fallback, propertyName, - path, - TOKEN_REGEX + path ); tokens.push(...fallbackTokens); } else { diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index 930488adb..863603b41 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -550,8 +550,7 @@ export function processImportedStringTokens( const cssVarTokens = extractTokensFromCssVars( span.text, propertyName, - path, - tokenRegex + path ); 
cssVarTokens.forEach((token) => { tokens.push({ @@ -601,8 +600,7 @@ export function processImportedStringTokens( const cssVarTokens = extractTokensFromCssVars( importedValue.value, propertyName, - path, - tokenRegex + path ); cssVarTokens.forEach((token) => { tokens.push({ diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts new file mode 100644 index 000000000..e581b4fbe --- /dev/null +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -0,0 +1,174 @@ +import { + Project, + Node, + SourceFile, + PropertyAssignment, + SpreadAssignment, + StringLiteral, + PropertyAccessExpression, + ObjectLiteralExpression, + CallExpression, + TemplateExpression, + Identifier, +} from 'ts-morph'; +import { TOKEN_REGEX, TokenReference } from './types'; +import { extractTokensFromCssVars } from './cssVarTokenExtractor'; +import { addTokenToArray, extractTokensFromText } from './tokenUtils'; +import { ImportedValue, processImportedStringTokens } from './importAnalyzer'; + +interface TokenResolverInfo { + node: T; + path: string[]; + parentName: string; + tokens: TokenReference[]; + importedValues: Map; +} + +/** + * Function that centarlizes the logic for resolving tokens from a node. + * Given that this is recursive logic, it's much easier to pass this back to itself. + * @param node + * @returns + */ +export const resolveToken = (info: TokenResolverInfo): TokenReference[] => { + const { node, tokens } = info; + + if (Node.isStringLiteral(node)) { + // Path in the event we need to process string literals, however this isn't used given tokens are stored as + // initialized values and imports. Generally, as property accessors or identifiers + // For now we'll leave a stub + return processStringLiteral(info as TokenResolverInfo); + } else if (Node.isTemplateExpression(node)) { + return processTemplateExpression( + info as TokenResolverInfo + ); + } else if (Node.isIdentifier(node)) { + return processIdentifier(info as TokenResolverInfo); + } else if (Node.isPropertyAccessExpression(node)) { + } else if (Node.isObjectLiteralExpression(node)) { + } else if (Node.isSpreadAssignment(node)) { + } else if ( + Node.isCallExpression(node) && + node.getExpression().getText() === 'createCustomFocusIndicatorStyle' + ) { + } else if (Node.isCallExpression(node)) { + } + + return tokens; +}; + +/** + * Stub for processing string literals which we don't need currently. 
+ * @param node + * @returns + */ +const processStringLiteral = ( + info: TokenResolverInfo +): TokenReference[] => { + return info.tokens; +}; + +const processIdentifier = ( + info: TokenResolverInfo +): TokenReference[] => { + const { node, importedValues, parentName, path, tokens } = info; + + let returnTokens = tokens.slice(); + + const text = node.getText(); + + // First check if it matches the token regex directly + const matches = extractTokensFromText(node); + if (matches.length > 0) { + matches.forEach((match) => { + returnTokens = addTokenToArray( + { + property: path[path.length - 1] || parentName, + token: [match], + path, + }, + returnTokens + ); + }); + } + + // Then check if it's an imported value reference + if (importedValues && importedValues.has(text)) { + const importTokens = processImportedStringTokens( + importedValues, + path[path.length - 1] || parentName, + text, + path, + TOKEN_REGEX + ); + returnTokens.push(...importTokens); + } + + return returnTokens; +}; + +const processPropertyAccess = ( + node: PropertyAccessExpression +): TokenReference[] => { + return []; +}; + +const processObjectLiteral = ( + node: ObjectLiteralExpression +): TokenReference[] => { + return []; +}; + +const processSpreadAssignment = (node: SpreadAssignment): TokenReference[] => { + return []; +}; + +const processCallExpression = (node: CallExpression): TokenReference[] => { + return []; +}; + +/** + * + * @param info + * @returns + */ +const processTemplateExpression = ( + info: TokenResolverInfo +): TokenReference[] => { + /** + * This is where we should process template spans and feed it back into resolveToken. We also need to check that + * imported values are tokens etc. + */ + + const { node, path, parentName, tokens } = info; + const text = node.getText(); + + // Check for CSS var() syntax that might contain tokens + if (text.includes('var(')) { + const cssVarTokens = extractTokensFromCssVars( + text, + path[path.length - 1] || parentName, + path + ); + return addTokenToArray(cssVarTokens, tokens); + } else { + // Check for direct token references + const matches = extractTokensFromText(node); + + let returnTokens = tokens.slice(); + if (matches.length > 0) { + matches.forEach((match) => { + returnTokens = addTokenToArray( + { + property: path[path.length - 1] || parentName, + token: [match], + path, + }, + returnTokens + ); + }); + } + + return returnTokens; + } +}; From 28838ca5350fae87d1c971cf575ca50bac085d78 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 22 Apr 2025 15:11:02 -0700 Subject: [PATCH 33/75] additional refactors and clean up of logic. ensure we have separation of concerns --- packages/token-analyzer/src/astAnalyzer.ts | 94 ++----------- packages/token-analyzer/src/tokenResolver.ts | 139 +++++++++++++++++-- 2 files changed, 146 insertions(+), 87 deletions(-) diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index a00feaf12..13e96b8fb 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -54,11 +54,7 @@ function processStyleProperty( let tokens: TokenReference[] = []; const parentName = Node.isPropertyAssignment(prop) ? 
prop.getName() : ''; - function processNode(node?: Node, path: string[] = []): void { - if (!node) { - return; - } - + function processNode(node: Node, path: string[] = []): void { // If we're processing a reset style, we need to add the parent name to the path if (isResetStyles && path.length === 0 && parentName) { path.push(parentName); @@ -68,77 +64,14 @@ function processStyleProperty( if ( Node.isStringLiteral(node) || Node.isTemplateExpression(node) || - Node.isIdentifier(node) + Node.isIdentifier(node) || + Node.isPropertyAccessExpression(node) || + Node.isObjectLiteralExpression(node) || + Node.isSpreadAssignment(node) || + (Node.isCallExpression(node) && + node.getExpression().getText() === 'createCustomFocusIndicatorStyle') ) { tokens = resolveToken({ node, path, parentName, tokens, importedValues }); - } else if (Node.isPropertyAccessExpression(node)) { - const text = node.getText(); - const isToken = isTokenReference(text); - if (isToken) { - tokens = addTokenToArray( - { - property: path[path.length - 1] || parentName, - token: [text], - path, - }, - tokens - ); - } - } else if (Node.isObjectLiteralExpression(node)) { - node.getProperties().forEach((childProp) => { - if (Node.isPropertyAssignment(childProp)) { - const childName = childProp.getName(); - processNode(childProp.getInitializer(), [...path, childName]); - } else if (Node.isSpreadAssignment(childProp)) { - // Handle spread elements in object literals - processNode(childProp.getExpression(), path); - } - }); - } else if (Node.isSpreadAssignment(node)) { - // Handle spread elements - processNode(node.getExpression(), path); - } else if ( - Node.isCallExpression(node) && - node.getExpression().getText() === 'createCustomFocusIndicatorStyle' - ) { - const focus = `:focus`; - const focusWithin = `:focus-within`; - let nestedModifier = focus; - - const passedTokens = node.getArguments()[0]; - const passedOptions = node.getArguments()[1]; - - if (passedOptions && Node.isObjectLiteralExpression(passedOptions)) { - passedOptions.getProperties().forEach((property) => { - if (Node.isPropertyAssignment(property)) { - const optionName = property.getName(); - if (optionName === 'selector') { - const selectorType = property.getInitializer()?.getText(); - if (selectorType === 'focus') { - nestedModifier = focus; - } else if (selectorType === 'focus-within') { - nestedModifier = focusWithin; - } - } - } - }); - } - - if (passedTokens && Node.isObjectLiteralExpression(passedTokens)) { - passedTokens.getProperties().forEach((property) => { - if (Node.isPropertyAssignment(property)) { - const childName = property.getName(); - processNode(property.getInitializer(), [ - ...path, - nestedModifier, - childName, - ]); - } else if (Node.isSpreadAssignment(property)) { - // Handle spread elements in object literals within function arguments - processNode(property.getExpression(), [...path, nestedModifier]); - } - }); - } } else if (Node.isCallExpression(node)) { // Process calls like shorthands.borderColor(tokens.color) const functionName = node.getExpression().getText(); @@ -173,11 +106,14 @@ function processStyleProperty( argument.getProperties().forEach((property) => { if (Node.isPropertyAssignment(property)) { const childName = property.getName(); - processNode(property.getInitializer(), [ - ...path, - functionName, - childName, - ]); + const childInitializer = property.getInitializer(); + if (childInitializer) { + processNode(childInitializer, [ + ...path, + functionName, + childName, + ]); + } } }); } diff --git 
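For illustration only, not part of the patch series: with the resolveToken dispatcher introduced above, a template-literal style value containing a var() fallback is routed to processTemplateExpression and from there into extractTokensFromCssVars. A rough usage sketch; the analyzed source text is made up and importedValues is empty because it imports nothing:

import { Project, Node } from 'ts-morph';
import { resolveToken } from './tokenResolver';
import { ImportedValue } from './importAnalyzer';

const project = new Project({ useInMemoryFileSystem: true });
const file = project.createSourceFile(
  'example-styles.ts',
  'const styles = { outline: `var(--focus-ring, ${tokens.colorStrokeFocus2})` };'
);
const assignment = file.getDescendants().find(Node.isPropertyAssignment);
const initializer = assignment?.getInitializer();

const found = initializer
  ? resolveToken({
      node: initializer,
      path: ['outline'],
      parentName: 'outline',
      tokens: [],
      importedValues: new Map<string, ImportedValue>(),
    })
  : [];
// found ≈ [{ property: 'outline', token: ['tokens.colorStrokeFocus2'], path: ['outline'] }]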
a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index e581b4fbe..bc5b21434 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -13,8 +13,13 @@ import { } from 'ts-morph'; import { TOKEN_REGEX, TokenReference } from './types'; import { extractTokensFromCssVars } from './cssVarTokenExtractor'; -import { addTokenToArray, extractTokensFromText } from './tokenUtils'; +import { + addTokenToArray, + extractTokensFromText, + isTokenReference, +} from './tokenUtils'; import { ImportedValue, processImportedStringTokens } from './importAnalyzer'; +import { resolve } from 'path'; interface TokenResolverInfo { node: T; @@ -45,13 +50,28 @@ export const resolveToken = (info: TokenResolverInfo): TokenReference[] => { } else if (Node.isIdentifier(node)) { return processIdentifier(info as TokenResolverInfo); } else if (Node.isPropertyAccessExpression(node)) { + return processPropertyAccess( + info as TokenResolverInfo + ); } else if (Node.isObjectLiteralExpression(node)) { + return processObjectLiteral( + info as TokenResolverInfo + ); } else if (Node.isSpreadAssignment(node)) { + return processSpreadAssignment(info as TokenResolverInfo); } else if ( Node.isCallExpression(node) && node.getExpression().getText() === 'createCustomFocusIndicatorStyle' ) { + return processFocusCallExpression( + info as TokenResolverInfo + ); } else if (Node.isCallExpression(node)) { + return processCallExpression(info as TokenResolverInfo); + } else if (Node.isPropertyAssignment(node)) { + return processPropertyAssignment( + info as TokenResolverInfo + ); } return tokens; @@ -108,22 +128,107 @@ const processIdentifier = ( }; const processPropertyAccess = ( - node: PropertyAccessExpression + info: TokenResolverInfo ): TokenReference[] => { - return []; + const { node, parentName, path, tokens } = info; + + const text = node.getText(); + const isToken = isTokenReference(text); + if (isToken) { + return addTokenToArray( + { + property: path[path.length - 1] || parentName, + token: [text], + path, + }, + tokens + ); + } + return tokens; }; const processObjectLiteral = ( - node: ObjectLiteralExpression + info: TokenResolverInfo ): TokenReference[] => { - return []; + const { node, parentName, path, tokens, importedValues } = info; + + let returnTokens = tokens.slice(); + node.getProperties().forEach((childProp) => { + returnTokens = returnTokens.concat( + resolveToken({ + node: childProp, + path, + parentName, + tokens, + importedValues, + }) + ); + }); + return returnTokens; }; -const processSpreadAssignment = (node: SpreadAssignment): TokenReference[] => { - return []; +const processSpreadAssignment = ( + info: TokenResolverInfo +): TokenReference[] => { + const { node, path, parentName, tokens, importedValues } = info; + return tokens.concat( + resolveToken({ + node: node.getExpression(), + path, + parentName, + tokens, + importedValues, + }) + ); }; -const processCallExpression = (node: CallExpression): TokenReference[] => { +const processFocusCallExpression = ( + info: TokenResolverInfo +): TokenReference[] => { + const { node, path, parentName, tokens, importedValues } = info; + + const focus = `:focus`; + const focusWithin = `:focus-within`; + let nestedModifier = focus; + + const passedTokens = node.getArguments()[0]; + const passedOptions = node.getArguments()[1]; + + // Parse out the options being passed to the focus funuction and determine which selector is being used + if (passedOptions && 
Node.isObjectLiteralExpression(passedOptions)) { + passedOptions.getProperties().forEach((property) => { + if (Node.isPropertyAssignment(property)) { + const optionName = property.getName(); + if (optionName === 'selector') { + const selectorType = property.getInitializer()?.getText(); + if (selectorType === 'focus') { + nestedModifier = focus; + } else if (selectorType === 'focus-within') { + nestedModifier = focusWithin; + } + } + } + }); + } + + if (passedTokens) { + // We can simplify the logic since we process node types and extract within resolveTokens. We merely need to pass + // the updated path + return resolveToken({ + node: passedTokens, + path: [...path, nestedModifier], + parentName, + tokens, + importedValues, + }); + } + + return tokens; +}; + +const processCallExpression = ( + info: TokenResolverInfo +): TokenReference[] => { return []; }; @@ -172,3 +277,21 @@ const processTemplateExpression = ( return returnTokens; } }; + +const processPropertyAssignment = ( + info: TokenResolverInfo +): TokenReference[] => { + const { node, path, parentName, tokens, importedValues } = info; + + const childName = node.getName(); + const newPath = [...path, childName]; + const propertyNode = node.getInitializer(); + + return resolveToken({ + node: propertyNode || node, + path: newPath, + parentName, + tokens, + importedValues, + }); +}; From ff6c6c4142f1fd13bbacc8a46cea0d9a1b3a7388 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 22 Apr 2025 23:43:02 -0700 Subject: [PATCH 34/75] move last functionality into token resolver --- packages/token-analyzer/src/astAnalyzer.ts | 62 +------------------- packages/token-analyzer/src/tokenResolver.ts | 46 ++++++++++++++- 2 files changed, 47 insertions(+), 61 deletions(-) diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index 13e96b8fb..21ea94864 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -69,68 +69,10 @@ function processStyleProperty( Node.isObjectLiteralExpression(node) || Node.isSpreadAssignment(node) || (Node.isCallExpression(node) && - node.getExpression().getText() === 'createCustomFocusIndicatorStyle') + node.getExpression().getText() === 'createCustomFocusIndicatorStyle') || + Node.isCallExpression(node) ) { tokens = resolveToken({ node, path, parentName, tokens, importedValues }); - } else if (Node.isCallExpression(node)) { - // Process calls like shorthands.borderColor(tokens.color) - const functionName = node.getExpression().getText(); - - // check if we're using a shorthand function and get the output of a call based on parameters passed into the function - const affectedProperties = getPropertiesForShorthand( - functionName, - node.getArguments() - ); - - // If we have a shorthand function, we need to process the affected properties. - // getPropertiesForShorthand will return an array of objects - // with the property name and the token reference - // e.g. 
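For illustration only, not part of the patch series: processFocusCallExpression above only reads the selector option and then feeds the style-object argument back into resolveToken with ':focus' (or ':focus-within') prepended to the path, so the inner properties resolve like any other assignment. Roughly, for a made-up call without options:

import { TokenReference } from './types';

// Hypothetical input in an analyzed styles file:
//   focusIndicator: createCustomFocusIndicatorStyle({ borderColor: tokens.colorStrokeFocus2 }),
// Expected resolution (roughly), based on the hunk above:
const expected: TokenReference[] = [
  {
    property: 'borderColor',
    token: ['tokens.colorStrokeFocus2'],
    path: [':focus', 'borderColor'],
  },
];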
{ property: 'borderColor', token: 'tokens.color' } - // It will also deeply check for initialized values etc and validate they are tokens - if (affectedProperties.length > 0) { - // Process each argument and apply it to all affected properties - affectedProperties.forEach((argument) => { - tokens = addTokenToArray( - { - property: argument.property, - token: [argument.token], - path: path.concat(argument.property), - }, - tokens - ); - }); - } else { - // Generic handling of functions that are not whitelisted - node.getArguments().forEach((argument) => { - if (Node.isObjectLiteralExpression(argument)) { - argument.getProperties().forEach((property) => { - if (Node.isPropertyAssignment(property)) { - const childName = property.getName(); - const childInitializer = property.getInitializer(); - if (childInitializer) { - processNode(childInitializer, [ - ...path, - functionName, - childName, - ]); - } - } - }); - } - // Check for string literals in function arguments that might contain CSS variables with tokens - if (Node.isStringLiteral(argument)) { - const text = argument.getText(); - if (text.includes('var(')) { - const cssVarTokens = extractTokensFromCssVars( - text, - path[path.length - 1] || parentName, - [...path, functionName] - ); - tokens.push(...cssVarTokens); - } - } - }); - } } } diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index bc5b21434..1d146f8b6 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -16,6 +16,7 @@ import { extractTokensFromCssVars } from './cssVarTokenExtractor'; import { addTokenToArray, extractTokensFromText, + getPropertiesForShorthand, isTokenReference, } from './tokenUtils'; import { ImportedValue, processImportedStringTokens } from './importAnalyzer'; @@ -229,7 +230,50 @@ const processFocusCallExpression = ( const processCallExpression = ( info: TokenResolverInfo ): TokenReference[] => { - return []; + const { node, path, parentName, tokens, importedValues } = info; + + let returnTokens = tokens.slice(); + // Process calls like shorthands.borderColor(tokens.color) + const functionName = node.getExpression().getText(); + + // check if we're using a shorthand function and get the output of a call based on parameters passed into the function + const affectedProperties = getPropertiesForShorthand( + functionName, + node.getArguments() + ); + + // If we have a shorthand function, we need to process the affected properties. + // getPropertiesForShorthand will return an array of objects + // with the property name and the token reference + // e.g. 
{ property: 'borderColor', token: 'tokens.color' } + // It will also deeply check for initialized values etc and validate they are tokens + if (affectedProperties.length > 0) { + // Process each argument and apply it to all affected properties + affectedProperties.forEach((argument) => { + returnTokens = addTokenToArray( + { + property: argument.property, + token: [argument.token], + path: path.concat(argument.property), + }, + returnTokens + ); + }); + } else { + // Generic handling of functions that are not whitelisted + node.getArguments().forEach((argument) => { + returnTokens = returnTokens.concat( + resolveToken({ + node: argument, + path: [...path, functionName], + parentName, + tokens: returnTokens, + importedValues, + }) + ); + }); + } + return returnTokens; }; /** From ad940636224342eadb2d921f5939fa48715384ac Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 23 Apr 2025 02:22:15 -0700 Subject: [PATCH 35/75] additional clean up --- packages/token-analyzer/src/astAnalyzer.ts | 42 +++++++--------------- 1 file changed, 12 insertions(+), 30 deletions(-) diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index 21ea94864..2cfbb4024 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -54,36 +54,18 @@ function processStyleProperty( let tokens: TokenReference[] = []; const parentName = Node.isPropertyAssignment(prop) ? prop.getName() : ''; - function processNode(node: Node, path: string[] = []): void { - // If we're processing a reset style, we need to add the parent name to the path - if (isResetStyles && path.length === 0 && parentName) { - path.push(parentName); - } - - // Check for string literals or template expressions (string template literals) - if ( - Node.isStringLiteral(node) || - Node.isTemplateExpression(node) || - Node.isIdentifier(node) || - Node.isPropertyAccessExpression(node) || - Node.isObjectLiteralExpression(node) || - Node.isSpreadAssignment(node) || - (Node.isCallExpression(node) && - node.getExpression().getText() === 'createCustomFocusIndicatorStyle') || - Node.isCallExpression(node) - ) { - tokens = resolveToken({ node, path, parentName, tokens, importedValues }); - } - } - - if (Node.isPropertyAssignment(prop)) { - const initializer = prop.getInitializer(); - if (initializer) { - processNode(initializer); - } - } else if (Node.isSpreadAssignment(prop)) { - processNode(prop.getExpression()); - } + const path = isResetStyles && parentName ? [parentName] : []; + + // resolve all the tokens within our style recursively. This is encapsulated within the resolveToken function + tokens = resolveToken({ + node: Node.isPropertyAssignment(prop) + ? prop.getInitializer() ?? 
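For illustration only, not part of the patch series: getPropertiesForShorthand is what turns a whitelisted shorthand call into one entry per affected longhand; the four border-*-color entries in the snapshot diff earlier in this series come from that expansion. A made-up sketch of the mapping, assuming shorthands.borderColor expands to the four sides and that TokenReference has the fields shown in the snapshots:

import { TokenReference } from './types';

// Hypothetical source being analyzed:
//   root: { ...shorthands.borderColor(tokens.colorNeutralStroke1) },
// Roughly what processCallExpression above would record for it:
const borderSides = ['borderTopColor', 'borderRightColor', 'borderBottomColor', 'borderLeftColor'];
const expected: TokenReference[] = borderSides.map((property) => ({
  property,
  token: ['tokens.colorNeutralStroke1'],
  path: [property],
}));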
prop + : prop, + path, + parentName, + tokens, + importedValues, + }); return tokens; } From 36f98f81d0637d612123ee4411c3ce7fd1552f70 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 23 Apr 2025 02:38:07 -0700 Subject: [PATCH 36/75] clean up files custom prettier config add additional todos move processImportedStringTokens into token resolver --- packages/token-analyzer/.prettierrc | 4 + packages/token-analyzer/README.md | 6 + packages/token-analyzer/src/astAnalyzer.ts | 104 ++----- packages/token-analyzer/src/importAnalyzer.ts | 265 ++---------------- packages/token-analyzer/src/tokenResolver.ts | 237 +++++++++++----- 5 files changed, 214 insertions(+), 402 deletions(-) create mode 100644 packages/token-analyzer/.prettierrc diff --git a/packages/token-analyzer/.prettierrc b/packages/token-analyzer/.prettierrc new file mode 100644 index 000000000..0981b7cc0 --- /dev/null +++ b/packages/token-analyzer/.prettierrc @@ -0,0 +1,4 @@ +{ + "singleQuote": true, + "printWidth": 120 +} diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index c955d68da..f47399ea2 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -20,6 +20,12 @@ A static analysis tool that scans your project's style files to track and analyz - process merge styles and map meta data to styles - parse through each property of styles - with each property,we should look at whether an initializer is there, spread, delaration etc and then determine if that's a token. if it is, we also need to see if there's a fallback chain and not just log a token but also log the tokens in the right order (this should also open the door to ensure we don't over complicate or duplicate logic here) + - The data flow complexity is a bit high currently and we should only recurse where we actually need to. + - property set in styles -> analyze type (expression call, initializer, declaration, etc) -> resolve given import information, type and rules -> Once we resolve, analyze if it's a token which should be a single call so we can centralize it -> return token with path, value, etc. This should include priority order if we have a var() fallback structure. +- we need to update isToken to resolve to which package/module it's imported from. +- We also need to do this for shorthands +- We should write a function that does this from a node and follows it up the import chain. 
+- Clean processImportedStringTokens to point back to resolveToken as there's some duplication there ## Features diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index 2cfbb4024..81e9ed567 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -1,10 +1,4 @@ -import { - Project, - Node, - SourceFile, - PropertyAssignment, - SpreadAssignment, -} from 'ts-morph'; +import { Project, Node, SourceFile, PropertyAssignment, SpreadAssignment } from 'ts-morph'; import { TokenReference, StyleAnalysis, @@ -16,12 +10,6 @@ import { } from './types.js'; import { log, measure, measureAsync } from './debugUtils.js'; import { analyzeImports, ImportedValue } from './importAnalyzer.js'; -import { extractTokensFromCssVars } from './cssVarTokenExtractor.js'; -import { - addTokenToArray, - getPropertiesForShorthand, - isTokenReference, -} from './tokenUtils.js'; import { resolveToken } from './tokenResolver'; const makeResetStylesToken = 'resetStyles'; @@ -58,9 +46,7 @@ function processStyleProperty( // resolve all the tokens within our style recursively. This is encapsulated within the resolveToken function tokens = resolveToken({ - node: Node.isPropertyAssignment(prop) - ? prop.getInitializer() ?? prop - : prop, + node: Node.isPropertyAssignment(prop) ? prop.getInitializer() ?? prop : prop, path, parentName, tokens, @@ -77,10 +63,7 @@ function analyzeMergeClasses(sourceFile: SourceFile): StyleMapping[] { const mappings: StyleMapping[] = []; sourceFile.forEachDescendant((node) => { - if ( - Node.isCallExpression(node) && - node.getExpression().getText() === 'mergeClasses' - ) { + if (Node.isCallExpression(node) && node.getExpression().getText() === 'mergeClasses') { const parentNode = node.getParent(); let slotName = ''; if (Node.isBinaryExpression(parentNode)) { @@ -163,11 +146,7 @@ function createStyleContent(tokens: TokenReference[]): StyleContent { * @param pathIndex where in the path we are, this allows us to preserve the path while recursing through it * @param currentLevel the current level of the nested structure we're working on */ - const createNestedStructure = ( - token: TokenReference, - pathIndex: number, - currentLevel: StyleTokens - ) => { + const createNestedStructure = (token: TokenReference, pathIndex: number, currentLevel: StyleTokens) => { const nestedKey = token.path[pathIndex]; // if no token array exists, create one @@ -182,11 +161,7 @@ function createStyleContent(tokens: TokenReference[]): StyleContent { if (!currentLevel[nestedKey].nested) { currentLevel[nestedKey].nested = {}; } - createNestedStructure( - token, - pathIndex + 1, - currentLevel[nestedKey].nested - ); + createNestedStructure(token, pathIndex + 1, currentLevel[nestedKey].nested); } else { currentLevel[nestedKey].tokens.push({ ...token, @@ -227,8 +202,7 @@ function createMetadata(styleMappings: StyleMapping[]): StyleMetadata { mapping.conditionalStyles.forEach(({ style, condition }) => { if (metadata.styleConditions[style]) { - metadata.styleConditions[style].conditions = - metadata.styleConditions[style].conditions || []; + metadata.styleConditions[style].conditions = metadata.styleConditions[style].conditions || []; if (condition) { metadata.styleConditions[style].conditions!.push(condition); } @@ -254,16 +228,10 @@ async function analyzeMakeStyles( const analysis: StyleAnalysis = {}; sourceFile.forEachDescendant((node) => { - if ( - Node.isCallExpression(node) && - node.getExpression().getText() === 'makeStyles' - ) 
{ + if (Node.isCallExpression(node) && node.getExpression().getText() === 'makeStyles') { const stylesArg = node.getArguments()[0]; const parentNode = node.getParent(); - if ( - Node.isObjectLiteralExpression(stylesArg) && - Node.isVariableDeclaration(parentNode) - ) { + if (Node.isObjectLiteralExpression(stylesArg) && Node.isVariableDeclaration(parentNode)) { // Process the styles object stylesArg.getProperties().forEach((prop) => { if (Node.isPropertyAssignment(prop)) { @@ -279,10 +247,7 @@ async function analyzeMakeStyles( } }); } - } else if ( - Node.isCallExpression(node) && - node.getExpression().getText() === 'makeResetStyles' - ) { + } else if (Node.isCallExpression(node) && node.getExpression().getText() === 'makeResetStyles') { // Similar to above, but the styles are stored under the assigned function name instead of local variable const stylesArg = node.getArguments()[0]; const parentNode = node.getParent(); @@ -300,16 +265,13 @@ async function analyzeMakeStyles( if (Node.isObjectLiteralExpression(stylesArg)) { // Process the styles object stylesArg.getProperties().forEach((prop) => { - if ( - Node.isPropertyAssignment(prop) || - Node.isSpreadAssignment(prop) - ) { + if (Node.isPropertyAssignment(prop) || Node.isSpreadAssignment(prop)) { const tokens = processStyleProperty(prop, importedValues, true); if (tokens.length) { const styleContent = createStyleContent(tokens); - analysis[functionName][makeResetStylesToken].tokens = analysis[ - functionName - ][makeResetStylesToken].tokens.concat(styleContent.tokens); + analysis[functionName][makeResetStylesToken].tokens = analysis[functionName][ + makeResetStylesToken + ].tokens.concat(styleContent.tokens); analysis[functionName][makeResetStylesToken].nested = { ...analysis[functionName][makeResetStylesToken].nested, ...styleContent.nested, @@ -329,10 +291,7 @@ async function analyzeMakeStyles( // We do a second parse to link known style functions (i.e. makeResetStyles assigned function variable names). // This is necessary to handle cases where we're using a variable directly in mergeClasses to link styles. 
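// A minimal, self-contained sketch of the linking that this second parse handles; the hook and
// variable names here (useBaseClassName, useStyles, base) are hypothetical, not taken from this repo.
import { makeStyles, makeResetStyles, mergeClasses, tokens } from '@fluentui/react-components';

export const useBaseClassName = makeResetStyles({ color: tokens.colorNeutralForeground1 });
export const useStyles = makeStyles({ root: { paddingLeft: tokens.spacingHorizontalM } });

// Inside a component the reset hook's class is usually captured in a local variable first:
//   const base = useBaseClassName();
//   const styles = useStyles();
//   mergeClasses(base, styles.root);
// The declaration `const base = useBaseClassName()` is what the second parse records, so the
// resetStyles entry for useBaseClassName ends up with assignedVariables: ['base'], which is what
// lets the mergeClasses pass attribute those reset styles to the slot they are merged into.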
- if ( - Node.isCallExpression(node) && - styleFunctionNames.includes(node.getExpression().getText()) - ) { + if (Node.isCallExpression(node) && styleFunctionNames.includes(node.getExpression().getText())) { const parentNode = node.getParent(); const functionName = node.getExpression().getText(); if (Node.isVariableDeclaration(parentNode)) { @@ -349,15 +308,10 @@ async function analyzeMakeStyles( // Store our makeResetStyles assigned variables in the analysis to link later variables.forEach((variable) => { Object.keys(analysis[variable.functionName]).forEach((styleName) => { - if ( - analysis[variable.functionName][styleName].assignedVariables === - undefined - ) { + if (analysis[variable.functionName][styleName].assignedVariables === undefined) { analysis[variable.functionName][styleName].assignedVariables = []; } - analysis[variable.functionName][styleName].assignedVariables?.push( - variable.variableName - ); + analysis[variable.functionName][styleName].assignedVariables?.push(variable.variableName); }); }); @@ -367,29 +321,21 @@ async function analyzeMakeStyles( /** * Combines mergeClasses and makeStyles analysis, with import resolution */ -async function analyzeFile( - filePath: string, - project: Project -): Promise { +async function analyzeFile(filePath: string, project: Project): Promise { log(`Analyzing ${filePath}`); const sourceFile = project.addSourceFileAtPath(filePath); // First analyze imports to find imported string values log('Analyzing imports to find imported token values'); - const importedValues = await measureAsync('analyze imports', () => - analyzeImports(sourceFile, project) - ); + const importedValues = await measureAsync('analyze imports', () => analyzeImports(sourceFile, project)); // Second pass: Analyze mergeClasses - const styleMappings = measure('analyze mergeClasses', () => - analyzeMergeClasses(sourceFile) - ); + const styleMappings = measure('analyze mergeClasses', () => analyzeMergeClasses(sourceFile)); // Third pass: Analyze makeStyles with imported values - const styleAnalysis = await measureAsync( - 'analyze makeStyles', - () => analyzeMakeStyles(sourceFile, importedValues) + const styleAnalysis = await measureAsync('analyze makeStyles', () => + analyzeMakeStyles(sourceFile, importedValues) ); // Create enhanced analysis with separated styles and metadata @@ -399,11 +345,5 @@ async function analyzeFile( }; } -export { - analyzeFile, - processStyleProperty, - analyzeMergeClasses, - analyzeMakeStyles, - createStyleContent, -}; +export { analyzeFile, processStyleProperty, analyzeMergeClasses, analyzeMakeStyles, createStyleContent }; export type { StyleMapping }; diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index 863603b41..d2c0f6b9c 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -1,22 +1,9 @@ // importAnalyzer.ts -import { - Project, - Node, - SourceFile, - ImportDeclaration, - Symbol, - TypeChecker, - SyntaxKind, -} from 'ts-morph'; +import { Project, Node, SourceFile, ImportDeclaration, Symbol, TypeChecker, SyntaxKind } from 'ts-morph'; import { log } from './debugUtils.js'; -import { TokenReference, TOKEN_REGEX } from './types.js'; +import { TokenReference } from './types.js'; import { getModuleSourceFile } from './moduleResolver.js'; -import { extractTokensFromCssVars } from './cssVarTokenExtractor.js'; -import { - isTokenReference, - extractTokensFromText, - addTokenToArray, -} from './tokenUtils.js'; +import { 
isTokenReference } from './tokenUtils.js'; /** * Represents a portion of a template expression @@ -44,10 +31,7 @@ export interface ImportedValue { /** * Analyzes imports in a source file to extract string values */ -export async function analyzeImports( - sourceFile: SourceFile, - project: Project -): Promise> { +export async function analyzeImports(sourceFile: SourceFile, project: Project): Promise> { const importedValues = new Map(); const filePath = sourceFile.getFilePath(); @@ -60,18 +44,9 @@ export async function analyzeImports( for (const importDecl of sourceFile.getImportDeclarations()) { try { // Process the import declaration - await processImportDeclaration( - importDecl, - sourceFile, - project, - importedValues, - typeChecker - ); + await processImportDeclaration(importDecl, sourceFile, project, importedValues, typeChecker); } catch (err) { - log( - `Error processing import: ${importDecl.getModuleSpecifierValue()}`, - err - ); + log(`Error processing import: ${importDecl.getModuleSpecifierValue()}`, err); } } @@ -92,11 +67,7 @@ async function processImportDeclaration( const containingFilePath = sourceFile.getFilePath(); // Use our module resolver to get the imported file - const importedFile = getModuleSourceFile( - project, - moduleSpecifier, - containingFilePath - ); + const importedFile = getModuleSourceFile(project, moduleSpecifier, containingFilePath); if (!importedFile) { log(`Could not resolve module: ${moduleSpecifier}`); @@ -104,22 +75,10 @@ async function processImportDeclaration( } // Process named imports (import { x } from 'module') - processNamedImports( - importDecl, - importedFile, - project, - importedValues, - typeChecker - ); + processNamedImports(importDecl, importedFile, project, importedValues, typeChecker); // Process default import (import x from 'module') - processDefaultImport( - importDecl, - importedFile, - project, - importedValues, - typeChecker - ); + processDefaultImport(importDecl, importedFile, project, importedValues, typeChecker); } /** @@ -137,11 +96,7 @@ function processNamedImports( const alias = namedImport.getAliasNode()?.getText() || importName; // Find the export's true source using TypeScript's type checker - const exportInfo = findExportDeclaration( - importedFile, - importName, - typeChecker - ); + const exportInfo = findExportDeclaration(importedFile, importName, typeChecker); if (exportInfo) { const { declaration, sourceFile: declarationFile } = exportInfo; @@ -157,11 +112,7 @@ function processNamedImports( templateSpans: valueInfo.templateSpans, }); - log( - `Added imported value: ${alias} = ${ - valueInfo.value - } from ${declarationFile.getFilePath()}` - ); + log(`Added imported value: ${alias} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); } } } @@ -185,11 +136,7 @@ function processDefaultImport( const importName = defaultImport.getText(); // Find the default export's true source - const exportInfo = findExportDeclaration( - importedFile, - 'default', - typeChecker - ); + const exportInfo = findExportDeclaration(importedFile, 'default', typeChecker); if (exportInfo) { const { declaration, sourceFile: declarationFile } = exportInfo; @@ -205,11 +152,7 @@ function processDefaultImport( templateSpans: valueInfo.templateSpans, }); - log( - `Added default import: ${importName} = ${ - valueInfo.value - } from ${declarationFile.getFilePath()}` - ); + log(`Added default import: ${importName} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); } } } @@ -238,13 +181,9 @@ function findExportDeclaration( } // Find 
the specific export we're looking for - const exportSymbol = exports.find( - (symbol: Symbol) => symbol.getName() === exportName - ); + const exportSymbol = exports.find((symbol: Symbol) => symbol.getName() === exportName); if (!exportSymbol) { - log( - `Export symbol '${exportName}' not found in ${sourceFile.getFilePath()}` - ); + log(`Export symbol '${exportName}' not found in ${sourceFile.getFilePath()}`); return undefined; } @@ -298,9 +237,7 @@ function findExportDeclaration( function extractValueFromDeclaration( declaration: Node, typeChecker: TypeChecker -): - | { value: string; isLiteral: boolean; templateSpans?: TemplateSpan[] } - | undefined { +): { value: string; isLiteral: boolean; templateSpans?: TemplateSpan[] } | undefined { // Handle variable declarations if (Node.isVariableDeclaration(declaration)) { const initializer = declaration.getInitializer(); @@ -319,9 +256,7 @@ function extractValueFromDeclaration( const sourceFile = declaration.getSourceFile(); // Find the local declaration with this name - for (const varDecl of sourceFile.getDescendantsOfKind( - SyntaxKind.VariableDeclaration - )) { + for (const varDecl of sourceFile.getDescendantsOfKind(SyntaxKind.VariableDeclaration)) { if (varDecl.getName() === name) { const initializer = varDecl.getInitializer(); return extractValueFromExpression(initializer, typeChecker); @@ -378,10 +313,7 @@ function extractValueFromExpression( const literal = span.getLiteral().getLiteralText(); // Handle different types of expressions in template spans - if ( - Node.isPropertyAccessExpression(spanExpr) && - isTokenReference(spanExpr) - ) { + if (Node.isPropertyAccessExpression(spanExpr) && isTokenReference(spanExpr)) { // Direct token reference in template span templateSpans.push({ text: spanText, @@ -489,164 +421,3 @@ function extractValueFromExpression( // Default case for unhandled expression types return undefined; } - -/** - * Process string tokens in imported values - */ -export function processImportedStringTokens( - importedValues: Map, - propertyName: string, - value: string, - path: string[] = [], - tokenRegex: RegExp = TOKEN_REGEX -): TokenReference[] { - let tokens: TokenReference[] = []; - - // Check if the value is an imported value reference - if (importedValues.has(value)) { - const importedValue = importedValues.get(value)!; - - // If we've already pre-resolved tokens for this value, use them - if (importedValue.resolvedTokens) { - return importedValue.resolvedTokens.map((token) => ({ - ...token, - property: propertyName, // Update property name for current context - path: path, // Update path for current context - })); - } - - if (importedValue.isLiteral) { - if (importedValue.templateSpans) { - // Process template spans specially - for (const span of importedValue.templateSpans) { - if (span.isToken) { - // Direct token reference in span - tokens = addTokenToArray( - { - property: propertyName, - token: [span.text], - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }, - tokens - ); - } else if ( - span.isReference && - span.referenceName && - importedValues.has(span.referenceName) - ) { - // Reference to another imported value - process recursively - const spanTokens = processImportedStringTokens( - importedValues, - propertyName, - span.referenceName, - path, - tokenRegex - ); - tokens.push(...spanTokens); - } else if (span.text.includes('var(')) { - // Check for CSS variables in the span text - const cssVarTokens = extractTokensFromCssVars( - span.text, - propertyName, - path - ); - 
cssVarTokens.forEach((token) => { - tokens.push({ - ...token, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); - }); - } else { - // Check for direct token matches in non-reference spans - const matches = extractTokensFromText(span.text); - if (matches.length > 0) { - matches.forEach((match) => { - tokens = addTokenToArray( - { - property: propertyName, - token: [match], - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }, - tokens - ); - }); - } - } - } - } else { - // Standard processing for literals without spans - // First, check for direct token references - const matches = extractTokensFromText(importedValue.value); - if (matches.length > 0) { - matches.forEach((match) => { - tokens = addTokenToArray( - { - property: propertyName, - token: [match], - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }, - tokens - ); - }); - } else if (importedValue.value.includes('var(')) { - // Then check for CSS variable patterns - const cssVarTokens = extractTokensFromCssVars( - importedValue.value, - propertyName, - path - ); - cssVarTokens.forEach((token) => { - tokens.push({ - ...token, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); - }); - } - } - } else { - // Non-literal values (like property access expressions) - if (isTokenReference(importedValue.value)) { - tokens = addTokenToArray( - { - property: propertyName, - token: [importedValue.value], - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }, - tokens - ); - } else { - // Check for any token references in the value - const matches = extractTokensFromText(importedValue.value); - if (matches.length > 0) { - matches.forEach((match) => { - tokens = addTokenToArray( - { - property: propertyName, - token: [match], - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }, - tokens - ); - }); - } - } - } - - // Cache the resolved tokens for future use - importedValue.resolvedTokens = tokens.map((token) => ({ ...token })); - } - - return tokens; -} diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index 1d146f8b6..556357fb9 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -1,7 +1,5 @@ import { - Project, Node, - SourceFile, PropertyAssignment, SpreadAssignment, StringLiteral, @@ -11,16 +9,10 @@ import { TemplateExpression, Identifier, } from 'ts-morph'; -import { TOKEN_REGEX, TokenReference } from './types'; +import { TokenReference } from './types'; import { extractTokensFromCssVars } from './cssVarTokenExtractor'; -import { - addTokenToArray, - extractTokensFromText, - getPropertiesForShorthand, - isTokenReference, -} from './tokenUtils'; -import { ImportedValue, processImportedStringTokens } from './importAnalyzer'; -import { resolve } from 'path'; +import { addTokenToArray, extractTokensFromText, getPropertiesForShorthand, isTokenReference } from './tokenUtils'; +import { ImportedValue } from './importAnalyzer'; interface TokenResolverInfo { node: T; @@ -45,34 +37,21 @@ export const resolveToken = (info: TokenResolverInfo): TokenReference[] => { // For now we'll leave a stub return processStringLiteral(info as TokenResolverInfo); } else if (Node.isTemplateExpression(node)) { - return processTemplateExpression( - info as TokenResolverInfo - ); + return processTemplateExpression(info as TokenResolverInfo); } else if (Node.isIdentifier(node)) { return 
processIdentifier(info as TokenResolverInfo); } else if (Node.isPropertyAccessExpression(node)) { - return processPropertyAccess( - info as TokenResolverInfo - ); + return processPropertyAccess(info as TokenResolverInfo); } else if (Node.isObjectLiteralExpression(node)) { - return processObjectLiteral( - info as TokenResolverInfo - ); + return processObjectLiteral(info as TokenResolverInfo); } else if (Node.isSpreadAssignment(node)) { return processSpreadAssignment(info as TokenResolverInfo); - } else if ( - Node.isCallExpression(node) && - node.getExpression().getText() === 'createCustomFocusIndicatorStyle' - ) { - return processFocusCallExpression( - info as TokenResolverInfo - ); + } else if (Node.isCallExpression(node) && node.getExpression().getText() === 'createCustomFocusIndicatorStyle') { + return processFocusCallExpression(info as TokenResolverInfo); } else if (Node.isCallExpression(node)) { return processCallExpression(info as TokenResolverInfo); } else if (Node.isPropertyAssignment(node)) { - return processPropertyAssignment( - info as TokenResolverInfo - ); + return processPropertyAssignment(info as TokenResolverInfo); } return tokens; @@ -83,15 +62,11 @@ export const resolveToken = (info: TokenResolverInfo): TokenReference[] => { * @param node * @returns */ -const processStringLiteral = ( - info: TokenResolverInfo -): TokenReference[] => { +const processStringLiteral = (info: TokenResolverInfo): TokenReference[] => { return info.tokens; }; -const processIdentifier = ( - info: TokenResolverInfo -): TokenReference[] => { +const processIdentifier = (info: TokenResolverInfo): TokenReference[] => { const { node, importedValues, parentName, path, tokens } = info; let returnTokens = tokens.slice(); @@ -115,22 +90,14 @@ const processIdentifier = ( // Then check if it's an imported value reference if (importedValues && importedValues.has(text)) { - const importTokens = processImportedStringTokens( - importedValues, - path[path.length - 1] || parentName, - text, - path, - TOKEN_REGEX - ); + const importTokens = processImportedStringTokens(importedValues, path[path.length - 1] || parentName, text, path); returnTokens.push(...importTokens); } return returnTokens; }; -const processPropertyAccess = ( - info: TokenResolverInfo -): TokenReference[] => { +const processPropertyAccess = (info: TokenResolverInfo): TokenReference[] => { const { node, parentName, path, tokens } = info; const text = node.getText(); @@ -148,9 +115,7 @@ const processPropertyAccess = ( return tokens; }; -const processObjectLiteral = ( - info: TokenResolverInfo -): TokenReference[] => { +const processObjectLiteral = (info: TokenResolverInfo): TokenReference[] => { const { node, parentName, path, tokens, importedValues } = info; let returnTokens = tokens.slice(); @@ -168,9 +133,7 @@ const processObjectLiteral = ( return returnTokens; }; -const processSpreadAssignment = ( - info: TokenResolverInfo -): TokenReference[] => { +const processSpreadAssignment = (info: TokenResolverInfo): TokenReference[] => { const { node, path, parentName, tokens, importedValues } = info; return tokens.concat( resolveToken({ @@ -183,9 +146,7 @@ const processSpreadAssignment = ( ); }; -const processFocusCallExpression = ( - info: TokenResolverInfo -): TokenReference[] => { +const processFocusCallExpression = (info: TokenResolverInfo): TokenReference[] => { const { node, path, parentName, tokens, importedValues } = info; const focus = `:focus`; @@ -227,9 +188,7 @@ const processFocusCallExpression = ( return tokens; }; -const 
processCallExpression = ( - info: TokenResolverInfo -): TokenReference[] => { +const processCallExpression = (info: TokenResolverInfo): TokenReference[] => { const { node, path, parentName, tokens, importedValues } = info; let returnTokens = tokens.slice(); @@ -237,10 +196,7 @@ const processCallExpression = ( const functionName = node.getExpression().getText(); // check if we're using a shorthand function and get the output of a call based on parameters passed into the function - const affectedProperties = getPropertiesForShorthand( - functionName, - node.getArguments() - ); + const affectedProperties = getPropertiesForShorthand(functionName, node.getArguments()); // If we have a shorthand function, we need to process the affected properties. // getPropertiesForShorthand will return an array of objects @@ -281,9 +237,7 @@ const processCallExpression = ( * @param info * @returns */ -const processTemplateExpression = ( - info: TokenResolverInfo -): TokenReference[] => { +const processTemplateExpression = (info: TokenResolverInfo): TokenReference[] => { /** * This is where we should process template spans and feed it back into resolveToken. We also need to check that * imported values are tokens etc. @@ -294,11 +248,7 @@ const processTemplateExpression = ( // Check for CSS var() syntax that might contain tokens if (text.includes('var(')) { - const cssVarTokens = extractTokensFromCssVars( - text, - path[path.length - 1] || parentName, - path - ); + const cssVarTokens = extractTokensFromCssVars(text, path[path.length - 1] || parentName, path); return addTokenToArray(cssVarTokens, tokens); } else { // Check for direct token references @@ -322,9 +272,7 @@ const processTemplateExpression = ( } }; -const processPropertyAssignment = ( - info: TokenResolverInfo -): TokenReference[] => { +const processPropertyAssignment = (info: TokenResolverInfo): TokenReference[] => { const { node, path, parentName, tokens, importedValues } = info; const childName = node.getName(); @@ -339,3 +287,146 @@ const processPropertyAssignment = ( importedValues, }); }; + +/** + * Process string tokens in imported values + */ +export function processImportedStringTokens( + importedValues: Map, + propertyName: string, + value: string, + path: string[] = [] +): TokenReference[] { + let tokens: TokenReference[] = []; + + // Check if the value is an imported value reference + if (importedValues.has(value)) { + // Cast to ImportedValue as we know the value exists + const importedValue = importedValues.get(value) as ImportedValue; + + // If we've already pre-resolved tokens for this value, use them + if (importedValue.resolvedTokens) { + return importedValue.resolvedTokens.map((token) => ({ + ...token, + property: propertyName, // Update property name for current context + path: path, // Update path for current context + })); + } + + if (importedValue.isLiteral) { + if (importedValue.templateSpans) { + // Process template spans specially + for (const span of importedValue.templateSpans) { + if (span.isToken) { + // Direct token reference in span + tokens = addTokenToArray( + { + property: propertyName, + token: [span.text], + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }, + tokens + ); + } else if (span.isReference && span.referenceName && importedValues.has(span.referenceName)) { + // Reference to another imported value - process recursively + const spanTokens = processImportedStringTokens(importedValues, propertyName, span.referenceName, path); + tokens.push(...spanTokens); + } else if 
(span.text.includes('var(')) { + // Check for CSS variables in the span text + const cssVarTokens = extractTokensFromCssVars(span.text, propertyName, path); + cssVarTokens.forEach((token) => { + tokens.push({ + ...token, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); + }); + } else { + // Check for direct token matches in non-reference spans + const matches = extractTokensFromText(span.text); + if (matches.length > 0) { + matches.forEach((match) => { + tokens = addTokenToArray( + { + property: propertyName, + token: [match], + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }, + tokens + ); + }); + } + } + } + } else { + // Standard processing for literals without spans + // First, check for direct token references + const matches = extractTokensFromText(importedValue.value); + if (matches.length > 0) { + matches.forEach((match) => { + tokens = addTokenToArray( + { + property: propertyName, + token: [match], + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }, + tokens + ); + }); + } else if (importedValue.value.includes('var(')) { + // Then check for CSS variable patterns + const cssVarTokens = extractTokensFromCssVars(importedValue.value, propertyName, path); + cssVarTokens.forEach((token) => { + tokens.push({ + ...token, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); + }); + } + } + } else { + // Non-literal values (like property access expressions) + if (isTokenReference(importedValue.value)) { + tokens = addTokenToArray( + { + property: propertyName, + token: [importedValue.value], + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }, + tokens + ); + } else { + // Check for any token references in the value + const matches = extractTokensFromText(importedValue.value); + if (matches.length > 0) { + matches.forEach((match) => { + tokens = addTokenToArray( + { + property: propertyName, + token: [match], + path, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }, + tokens + ); + }); + } + } + } + + // Cache the resolved tokens for future use + importedValue.resolvedTokens = tokens.map((token) => ({ ...token })); + } + + return tokens; +} From d42e43ff27587b5b8365ba6f6b8e271130cd5601 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 24 Apr 2025 15:10:39 -0700 Subject: [PATCH 37/75] move to nullish coalescing pass nodes from import analyzer out normalize args for processImportedStringTokens --- packages/token-analyzer/src/importAnalyzer.ts | 15 ++++ packages/token-analyzer/src/tokenResolver.ts | 78 ++++++++++++------- 2 files changed, 63 insertions(+), 30 deletions(-) diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index d2c0f6b9c..a5b841f92 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -13,6 +13,7 @@ interface TemplateSpan { isToken: boolean; // Whether this span is a token reference isReference: boolean; // Whether this span is a reference to another variable referenceName?: string; // The name of the referenced variable if isReference is true + node: Node; } /** @@ -278,6 +279,7 @@ function extractValueFromExpression( value: string; isLiteral: boolean; templateSpans?: TemplateSpan[]; + node: Node; } | undefined { if (!expression) { @@ -288,6 +290,7 @@ function extractValueFromExpression( return { value: expression.getLiteralValue(), isLiteral: true, + node: expression, }; } else if 
(Node.isTemplateExpression(expression)) { // Process the template head and spans fully @@ -303,6 +306,7 @@ function extractValueFromExpression( text: head, isToken: false, isReference: false, + node: expression.getHead(), }); } @@ -319,6 +323,7 @@ function extractValueFromExpression( text: spanText, isToken: true, isReference: false, + node: spanExpr, }); fullValue += spanText; } else if (Node.isIdentifier(spanExpr)) { @@ -328,6 +333,7 @@ function extractValueFromExpression( isToken: false, isReference: true, referenceName: spanText, + node: spanExpr, }); fullValue += spanText; } else { @@ -343,6 +349,7 @@ function extractValueFromExpression( text: resolvedExpr.value, isToken: false, isReference: false, + node: resolvedExpr.node, }); } fullValue += resolvedExpr.value; @@ -352,6 +359,7 @@ function extractValueFromExpression( text: spanText, isToken: false, isReference: false, + node: spanExpr, }); fullValue += spanText; } @@ -363,6 +371,7 @@ function extractValueFromExpression( text: literal, isToken: false, isReference: false, + node: span.getLiteral(), }); fullValue += literal; } @@ -372,6 +381,7 @@ function extractValueFromExpression( value: fullValue, isLiteral: true, templateSpans, + node: expression, }; } else if (Node.isIdentifier(expression)) { // Try to resolve the identifier to its value @@ -380,6 +390,7 @@ function extractValueFromExpression( return { value: expression.getText(), isLiteral: false, + node: expression, }; } @@ -389,6 +400,7 @@ function extractValueFromExpression( return { value: expression.getText(), isLiteral: false, + node: expression, }; } @@ -404,17 +416,20 @@ function extractValueFromExpression( return { value: expression.getText(), isLiteral: false, + node: expression, }; } else if (Node.isPropertyAccessExpression(expression)) { // Handle tokens.xyz or other property access return { value: expression.getText(), isLiteral: false, + node: expression, }; } else if (Node.isNoSubstitutionTemplateLiteral(expression)) { return { value: expression.getLiteralValue(), isLiteral: true, + node: expression, }; } diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index 556357fb9..3a3a92596 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -79,7 +79,7 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ matches.forEach((match) => { returnTokens = addTokenToArray( { - property: path[path.length - 1] || parentName, + property: path[path.length - 1] ?? parentName, token: [match], path, }, @@ -90,8 +90,9 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ // Then check if it's an imported value reference if (importedValues && importedValues.has(text)) { - const importTokens = processImportedStringTokens(importedValues, path[path.length - 1] || parentName, text, path); - returnTokens.push(...importTokens); + // const importTokens = processImportedStringTokens(importedValues, path[path.length - 1] ?? parentName, text, path); + const importTokens = processImportedStringTokens(info, text); + returnTokens = addTokenToArray(importTokens, returnTokens); } return returnTokens; @@ -105,7 +106,7 @@ const processPropertyAccess = (info: TokenResolverInfo if (isToken) { return addTokenToArray( { - property: path[path.length - 1] || parentName, + property: path[path.length - 1] ?? 
parentName, token: [text], path, }, @@ -248,7 +249,7 @@ const processTemplateExpression = (info: TokenResolverInfo): // Check for CSS var() syntax that might contain tokens if (text.includes('var(')) { - const cssVarTokens = extractTokensFromCssVars(text, path[path.length - 1] || parentName, path); + const cssVarTokens = extractTokensFromCssVars(text, path[path.length - 1] ?? parentName, path); return addTokenToArray(cssVarTokens, tokens); } else { // Check for direct token references @@ -259,7 +260,7 @@ const processTemplateExpression = (info: TokenResolverInfo): matches.forEach((match) => { returnTokens = addTokenToArray( { - property: path[path.length - 1] || parentName, + property: path[path.length - 1] ?? parentName, token: [match], path, }, @@ -280,7 +281,7 @@ const processPropertyAssignment = (info: TokenResolverInfo): const propertyNode = node.getInitializer(); return resolveToken({ - node: propertyNode || node, + node: propertyNode ?? node, path: newPath, parentName, tokens, @@ -291,13 +292,10 @@ const processPropertyAssignment = (info: TokenResolverInfo): /** * Process string tokens in imported values */ -export function processImportedStringTokens( - importedValues: Map, - propertyName: string, - value: string, - path: string[] = [] -): TokenReference[] { - let tokens: TokenReference[] = []; +export function processImportedStringTokens(info: TokenResolverInfo, value: string): TokenReference[] { + const { node, importedValues, parentName, path, tokens } = info; + let returnTokens = tokens.slice(); + const propertyName = path[path.length - 1] ?? parentName; // Check if the value is an imported value reference if (importedValues.has(value)) { @@ -319,7 +317,7 @@ export function processImportedStringTokens( for (const span of importedValue.templateSpans) { if (span.isToken) { // Direct token reference in span - tokens = addTokenToArray( + returnTokens = addTokenToArray( { property: propertyName, token: [span.text], @@ -327,28 +325,32 @@ export function processImportedStringTokens( isVariableReference: true, sourceFile: importedValue.sourceFile, }, - tokens + returnTokens ); } else if (span.isReference && span.referenceName && importedValues.has(span.referenceName)) { // Reference to another imported value - process recursively - const spanTokens = processImportedStringTokens(importedValues, propertyName, span.referenceName, path); - tokens.push(...spanTokens); + const spanTokens = processImportedStringTokens(info, span.referenceName); + returnTokens.push(...spanTokens); } else if (span.text.includes('var(')) { + // I think we can run resolveToken here. 
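// Two hypothetical imported values, to make the template-span handling here concrete (the
// constant names are illustrative only):
import { tokens } from '@fluentui/react-components';

// A template whose span is a direct token reference: importAnalyzer records that span with
// isToken: true, and it comes back as a TokenReference flagged isVariableReference with the
// declaring sourceFile attached.
export const focusOutline = `1px solid ${tokens.colorStrokeFocus2}`;

// A literal that already contains a CSS variable: this is the branch handled just above, where
// the raw var() text is handed to extractTokensFromCssVars to recover the token it refers to.
export const brandBackground = 'var(--colorBrandBackground)';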
+ // Check for CSS variables in the span text const cssVarTokens = extractTokensFromCssVars(span.text, propertyName, path); cssVarTokens.forEach((token) => { - tokens.push({ + returnTokens.push({ ...token, isVariableReference: true, sourceFile: importedValue.sourceFile, }); }); } else { + // If we call resolveToken above we might also be able to remove this + // Check for direct token matches in non-reference spans const matches = extractTokensFromText(span.text); if (matches.length > 0) { matches.forEach((match) => { - tokens = addTokenToArray( + returnTokens = addTokenToArray( { property: propertyName, token: [match], @@ -356,19 +358,23 @@ export function processImportedStringTokens( isVariableReference: true, sourceFile: importedValue.sourceFile, }, - tokens + returnTokens ); }); } } } } else { + // I think below can also call resolveToken + // + // + // Standard processing for literals without spans // First, check for direct token references const matches = extractTokensFromText(importedValue.value); if (matches.length > 0) { matches.forEach((match) => { - tokens = addTokenToArray( + returnTokens = addTokenToArray( { property: propertyName, token: [match], @@ -376,14 +382,18 @@ export function processImportedStringTokens( isVariableReference: true, sourceFile: importedValue.sourceFile, }, - tokens + returnTokens ); }); } else if (importedValue.value.includes('var(')) { + // I think below can also call resolveToken + // + // + // Then check for CSS variable patterns const cssVarTokens = extractTokensFromCssVars(importedValue.value, propertyName, path); cssVarTokens.forEach((token) => { - tokens.push({ + returnTokens.push({ ...token, isVariableReference: true, sourceFile: importedValue.sourceFile, @@ -392,9 +402,13 @@ export function processImportedStringTokens( } } } else { + // I think below can also call resolveToken + // + // + // Non-literal values (like property access expressions) if (isTokenReference(importedValue.value)) { - tokens = addTokenToArray( + returnTokens = addTokenToArray( { property: propertyName, token: [importedValue.value], @@ -402,14 +416,18 @@ export function processImportedStringTokens( isVariableReference: true, sourceFile: importedValue.sourceFile, }, - tokens + returnTokens ); } else { + // I think below can also call resolveToken + // + // + // Check for any token references in the value const matches = extractTokensFromText(importedValue.value); if (matches.length > 0) { matches.forEach((match) => { - tokens = addTokenToArray( + returnTokens = addTokenToArray( { property: propertyName, token: [match], @@ -417,7 +435,7 @@ export function processImportedStringTokens( isVariableReference: true, sourceFile: importedValue.sourceFile, }, - tokens + returnTokens ); }); } @@ -425,8 +443,8 @@ export function processImportedStringTokens( } // Cache the resolved tokens for future use - importedValue.resolvedTokens = tokens.map((token) => ({ ...token })); + importedValue.resolvedTokens = returnTokens.map((token) => ({ ...token })); } - return tokens; + return returnTokens; } From 3a466409e649868ead3cdc8b64c91bac2591c6b2 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 25 Apr 2025 02:42:25 -0700 Subject: [PATCH 38/75] additional code clean up and refactor --- packages/token-analyzer/src/importAnalyzer.ts | 3 + packages/token-analyzer/src/tokenResolver.ts | 102 ++++++++++-------- packages/token-analyzer/src/tokenUtils.ts | 31 +++--- 3 files changed, 81 insertions(+), 55 deletions(-) diff --git a/packages/token-analyzer/src/importAnalyzer.ts 
b/packages/token-analyzer/src/importAnalyzer.ts index a5b841f92..b7e076ca3 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -23,6 +23,7 @@ export interface ImportedValue { value: string; sourceFile: string; isLiteral: boolean; + node: Node; // Enhanced fields for template processing templateSpans?: TemplateSpan[]; // For template expressions with spans @@ -111,6 +112,7 @@ function processNamedImports( sourceFile: declarationFile.getFilePath(), isLiteral: valueInfo.isLiteral, templateSpans: valueInfo.templateSpans, + node: declaration, }); log(`Added imported value: ${alias} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); @@ -151,6 +153,7 @@ function processDefaultImport( sourceFile: declarationFile.getFilePath(), isLiteral: valueInfo.isLiteral, templateSpans: valueInfo.templateSpans, + node: declaration, }); log(`Added default import: ${importName} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index 3a3a92596..391ea608e 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -8,6 +8,10 @@ import { CallExpression, TemplateExpression, Identifier, + TemplateSpan, + TemplateHead, + TemplateMiddle, + TemplateTail, } from 'ts-morph'; import { TokenReference } from './types'; import { extractTokensFromCssVars } from './cssVarTokenExtractor'; @@ -20,6 +24,8 @@ interface TokenResolverInfo { parentName: string; tokens: TokenReference[]; importedValues: Map; + isVariableReference?: boolean; + sourceFile?: string; } /** @@ -31,6 +37,8 @@ interface TokenResolverInfo { export const resolveToken = (info: TokenResolverInfo): TokenReference[] => { const { node, tokens } = info; + console.log(info.node.getKindName()); + if (Node.isStringLiteral(node)) { // Path in the event we need to process string literals, however this isn't used given tokens are stored as // initialized values and imports. Generally, as property accessors or identifiers @@ -52,6 +60,16 @@ export const resolveToken = (info: TokenResolverInfo): TokenReference[] => { return processCallExpression(info as TokenResolverInfo); } else if (Node.isPropertyAssignment(node)) { return processPropertyAssignment(info as TokenResolverInfo); + } else if ( + Node.isTemplateSpan(node) || + Node.isTemplateHead(node) || + Node.isTemplateMiddle(node) || + Node.isTemplateTail(node) + ) { + // Unless we need specialized handling, use the template expression resolver + return processTemplateExpression( + info as TokenResolverInfo + ); } return tokens; @@ -67,7 +85,7 @@ const processStringLiteral = (info: TokenResolverInfo): TokenRefe }; const processIdentifier = (info: TokenResolverInfo): TokenReference[] => { - const { node, importedValues, parentName, path, tokens } = info; + const { node, importedValues, parentName, path, tokens, isVariableReference, sourceFile } = info; let returnTokens = tokens.slice(); @@ -83,7 +101,9 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ token: [match], path, }, - returnTokens + returnTokens, + isVariableReference, + sourceFile ); }); } @@ -92,14 +112,14 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ if (importedValues && importedValues.has(text)) { // const importTokens = processImportedStringTokens(importedValues, path[path.length - 1] ?? 
parentName, text, path); const importTokens = processImportedStringTokens(info, text); - returnTokens = addTokenToArray(importTokens, returnTokens); + returnTokens = addTokenToArray(importTokens, returnTokens, isVariableReference, sourceFile); } return returnTokens; }; const processPropertyAccess = (info: TokenResolverInfo): TokenReference[] => { - const { node, parentName, path, tokens } = info; + const { node, parentName, path, tokens, isVariableReference, sourceFile } = info; const text = node.getText(); const isToken = isTokenReference(text); @@ -110,7 +130,9 @@ const processPropertyAccess = (info: TokenResolverInfo token: [text], path, }, - tokens + tokens, + isVariableReference, + sourceFile ); } return tokens; @@ -190,7 +212,7 @@ const processFocusCallExpression = (info: TokenResolverInfo): To }; const processCallExpression = (info: TokenResolverInfo): TokenReference[] => { - const { node, path, parentName, tokens, importedValues } = info; + const { node, path, parentName, tokens, importedValues, isVariableReference, sourceFile } = info; let returnTokens = tokens.slice(); // Process calls like shorthands.borderColor(tokens.color) @@ -213,7 +235,9 @@ const processCallExpression = (info: TokenResolverInfo): TokenRe token: [argument.token], path: path.concat(argument.property), }, - returnTokens + returnTokens, + isVariableReference, + sourceFile ); }); } else { @@ -238,19 +262,21 @@ const processCallExpression = (info: TokenResolverInfo): TokenRe * @param info * @returns */ -const processTemplateExpression = (info: TokenResolverInfo): TokenReference[] => { +const processTemplateExpression = ( + info: TokenResolverInfo +): TokenReference[] => { /** * This is where we should process template spans and feed it back into resolveToken. We also need to check that * imported values are tokens etc. */ - const { node, path, parentName, tokens } = info; + const { node, path, parentName, tokens, isVariableReference, sourceFile } = info; const text = node.getText(); // Check for CSS var() syntax that might contain tokens if (text.includes('var(')) { const cssVarTokens = extractTokensFromCssVars(text, path[path.length - 1] ?? parentName, path); - return addTokenToArray(cssVarTokens, tokens); + return addTokenToArray(cssVarTokens, tokens, isVariableReference, sourceFile); } else { // Check for direct token references const matches = extractTokensFromText(node); @@ -263,8 +289,12 @@ const processTemplateExpression = (info: TokenResolverInfo): property: path[path.length - 1] ?? parentName, token: [match], path, + isVariableReference, + sourceFile, }, - returnTokens + returnTokens, + isVariableReference, + sourceFile ); }); } @@ -331,37 +361,17 @@ export function processImportedStringTokens(info: TokenResolverInfo, // Reference to another imported value - process recursively const spanTokens = processImportedStringTokens(info, span.referenceName); returnTokens.push(...spanTokens); - } else if (span.text.includes('var(')) { - // I think we can run resolveToken here. 
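// A small worked example of the nullish-coalescing property fallback used in the resolvers above
// (standalone sketch, not part of the resolver itself):
const pickProperty = (path: string[], parentName: string) => path[path.length - 1] ?? parentName;

pickProperty([':hover', 'borderLeftColor'], 'root'); // -> 'borderLeftColor'
pickProperty([], 'root'); // -> 'root' (no path yet, so fall back to the slot name)
// Unlike ||, ?? only falls back when the last segment is actually undefined, so a falsy-but-real
// segment such as an empty string would still be kept.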
- - // Check for CSS variables in the span text - const cssVarTokens = extractTokensFromCssVars(span.text, propertyName, path); - cssVarTokens.forEach((token) => { - returnTokens.push({ - ...token, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); - }); } else { - // If we call resolveToken above we might also be able to remove this - - // Check for direct token matches in non-reference spans - const matches = extractTokensFromText(span.text); - if (matches.length > 0) { - matches.forEach((match) => { - returnTokens = addTokenToArray( - { - property: propertyName, - token: [match], - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }, - returnTokens - ); - }); - } + // Run the span back through our resolver + returnTokens = resolveToken({ + node: span.node, + path, + parentName, + tokens: returnTokens, + importedValues, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); } } } else { @@ -403,8 +413,16 @@ export function processImportedStringTokens(info: TokenResolverInfo, } } else { // I think below can also call resolveToken + // But we need more information like the actual node. We can't access property access expressions from a string // - // + + console.log( + 'non literal value ======================', + importedValue.node.getKindName(), + importedValue.value, + importedValue.node.getFullText(), + importedValue.node.getText() + ); // Non-literal values (like property access expressions) if (isTokenReference(importedValue.value)) { diff --git a/packages/token-analyzer/src/tokenUtils.ts b/packages/token-analyzer/src/tokenUtils.ts index f1fef55e1..b08dbdb50 100644 --- a/packages/token-analyzer/src/tokenUtils.ts +++ b/packages/token-analyzer/src/tokenUtils.ts @@ -52,9 +52,7 @@ export function getExpresionFromIdentifier(node: Node): Node | undefined { * @param textOrNode The text or Node to extract tokens from * @returns Array of token reference strings */ -export function extractTokensFromText( - textOrNode: string | Node | Symbol -): string[] { +export function extractTokensFromText(textOrNode: string | Node | Symbol): string[] { // If we have a Node or Symbol, extract the text to check let text: string | undefined; const matches: string[] = []; @@ -112,14 +110,9 @@ type FunctionParams = T extends (...args: infer P) => any ? P : never; * @param functionName The name of the shorthand function (e.g., "borderColor" or "shorthands.borderColor") * @returns Array of CSS property names affected by this shorthand */ -export function getPropertiesForShorthand( - functionName: string, - args: Node[] -): { property: string; token: string }[] { +export function getPropertiesForShorthand(functionName: string, args: Node[]): { property: string; token: string }[] { // Extract base function name if it's a qualified name (e.g., shorthands.borderColor -> borderColor) - const baseName = functionName.includes('.') - ? functionName.split('.').pop() - : functionName; + const baseName = functionName.includes('.') ? 
functionName.split('.').pop() : functionName; const cleanFunctionName = baseName as keyof typeof shorthands; const shorthandFunction = shorthands[cleanFunctionName]; @@ -162,16 +155,28 @@ export function getPropertiesForShorthand( */ export const addTokenToArray = ( tokensToAdd: TokenReference[] | TokenReference, - target: TokenReference[] + target: TokenReference[], + isVariableReference?: boolean, + sourceFile?: string ) => { // create new array without modifying the original array const newArray = target.slice(); // add items to the array if (Array.isArray(tokensToAdd)) { - newArray.push(...tokensToAdd); + newArray.push( + ...tokensToAdd.map((token) => ({ + ...token, + ...(isVariableReference && { isVariableReference }), + ...(sourceFile && { sourceFile }), + })) + ); } else { - newArray.push(tokensToAdd); + newArray.push({ + ...tokensToAdd, + ...(isVariableReference && { isVariableReference }), + ...(sourceFile && { sourceFile }), + }); } // return array without modifying the original array From c12bd2cf0838e59c748f75e65b268690d1c4c602 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 25 Apr 2025 12:27:13 -0700 Subject: [PATCH 39/75] additional refactor --- packages/token-analyzer/src/tokenResolver.ts | 87 +++++--------------- 1 file changed, 20 insertions(+), 67 deletions(-) diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index 391ea608e..f9d4f305a 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -375,55 +375,18 @@ export function processImportedStringTokens(info: TokenResolverInfo, } } } else { - // I think below can also call resolveToken - // - // - - // Standard processing for literals without spans - // First, check for direct token references - const matches = extractTokensFromText(importedValue.value); - if (matches.length > 0) { - matches.forEach((match) => { - returnTokens = addTokenToArray( - { - property: propertyName, - token: [match], - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }, - returnTokens - ); - }); - } else if (importedValue.value.includes('var(')) { - // I think below can also call resolveToken - // - // - - // Then check for CSS variable patterns - const cssVarTokens = extractTokensFromCssVars(importedValue.value, propertyName, path); - cssVarTokens.forEach((token) => { - returnTokens.push({ - ...token, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); - }); - } + // Run the span back through our resolver + returnTokens = resolveToken({ + node: importedValue.node, + path, + parentName, + tokens: returnTokens, + importedValues, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); } } else { - // I think below can also call resolveToken - // But we need more information like the actual node. 
We can't access property access expressions from a string - // - - console.log( - 'non literal value ======================', - importedValue.node.getKindName(), - importedValue.value, - importedValue.node.getFullText(), - importedValue.node.getText() - ); - // Non-literal values (like property access expressions) if (isTokenReference(importedValue.value)) { returnTokens = addTokenToArray( @@ -437,26 +400,16 @@ export function processImportedStringTokens(info: TokenResolverInfo, returnTokens ); } else { - // I think below can also call resolveToken - // - // - - // Check for any token references in the value - const matches = extractTokensFromText(importedValue.value); - if (matches.length > 0) { - matches.forEach((match) => { - returnTokens = addTokenToArray( - { - property: propertyName, - token: [match], - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }, - returnTokens - ); - }); - } + // Run the span back through our resolver + returnTokens = resolveToken({ + node: importedValue.node, + path, + parentName, + tokens: returnTokens, + importedValues, + isVariableReference: true, + sourceFile: importedValue.sourceFile, + }); } } From 9ec8be3aee65621fb14536cab1818d4f346ad9e1 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 29 Apr 2025 02:44:12 -0700 Subject: [PATCH 40/75] processing module imports and marking as known token modules clean up --- packages/token-analyzer/src/importAnalyzer.ts | 62 ++++++++++++++++--- packages/token-analyzer/src/tokenResolver.ts | 3 - packages/token-analyzer/src/types.ts | 14 +++++ 3 files changed, 68 insertions(+), 11 deletions(-) diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index b7e076ca3..df23409d6 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -1,7 +1,7 @@ // importAnalyzer.ts import { Project, Node, SourceFile, ImportDeclaration, Symbol, TypeChecker, SyntaxKind } from 'ts-morph'; import { log } from './debugUtils.js'; -import { TokenReference } from './types.js'; +import { knownTokenImportsAndModules, TokenReference } from './types.js'; import { getModuleSourceFile } from './moduleResolver.js'; import { isTokenReference } from './tokenUtils.js'; @@ -24,6 +24,7 @@ export interface ImportedValue { sourceFile: string; isLiteral: boolean; node: Node; + knownTokenPackage: boolean; // Enhanced fields for template processing templateSpans?: TemplateSpan[]; // For template expressions with spans @@ -67,20 +68,21 @@ async function processImportDeclaration( ): Promise { const moduleSpecifier = importDecl.getModuleSpecifierValue(); const containingFilePath = sourceFile.getFilePath(); - // Use our module resolver to get the imported file const importedFile = getModuleSourceFile(project, moduleSpecifier, containingFilePath); + console.log(moduleSpecifier, importedFile !== null); + if (!importedFile) { log(`Could not resolve module: ${moduleSpecifier}`); return; } // Process named imports (import { x } from 'module') - processNamedImports(importDecl, importedFile, project, importedValues, typeChecker); + processNamedImports(importDecl, importedFile, project, importedValues, typeChecker, moduleSpecifier); // Process default import (import x from 'module') - processDefaultImport(importDecl, importedFile, project, importedValues, typeChecker); + processDefaultImport(importDecl, importedFile, project, importedValues, typeChecker, moduleSpecifier); } /** @@ -91,7 +93,8 @@ function processNamedImports( 
importedFile: SourceFile, project: Project, importedValues: Map, - typeChecker: TypeChecker + typeChecker: TypeChecker, + moduleSpecifier: string ): void { for (const namedImport of importDecl.getNamedImports()) { const importName = namedImport.getName(); @@ -103,16 +106,42 @@ function processNamedImports( if (exportInfo) { const { declaration, sourceFile: declarationFile } = exportInfo; + // We need to first check if the import is coming from a known token package + // Extract the value from the declaration const valueInfo = extractValueFromDeclaration(declaration, typeChecker); + const knownTokenKeys = Object.keys(knownTokenImportsAndModules); + console.log( + importName, + knownTokenKeys, + exportInfo !== undefined, + valueInfo !== undefined, + declarationFile.getFilePath() + ); + // We should process the import's module first to determine if it's a known token package + // If it's not, we can then process its value as it's likely another file within the application or library. + if ( + (knownTokenKeys.includes(importName) && knownTokenImportsAndModules[importName].includes(moduleSpecifier)) || + knownTokenImportsAndModules.default.includes(moduleSpecifier) + ) { + importedValues.set(alias, { + value: importName, + sourceFile: declarationFile.getFilePath(), + isLiteral: false, + node: declaration, + knownTokenPackage: true, + }); + + log(`Added known token import: ${alias} = ${importName} from ${declarationFile.getFilePath()}`); + } else if (valueInfo) { importedValues.set(alias, { value: valueInfo.value, sourceFile: declarationFile.getFilePath(), isLiteral: valueInfo.isLiteral, templateSpans: valueInfo.templateSpans, node: declaration, + knownTokenPackage: false, }); log(`Added imported value: ${alias} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); @@ -129,10 +158,12 @@ function processDefaultImport( importedFile: SourceFile, project: Project, importedValues: Map, - typeChecker: TypeChecker + typeChecker: TypeChecker, + moduleSpecifier: string ): void { const defaultImport = importDecl.getDefaultImport(); if (!defaultImport) { + log(`No default import found in ${importDecl.getModuleSpecifierValue()}`); return; } @@ -141,19 +172,34 @@ const importName = defaultImport.getText(); // Find the default export's true source const exportInfo = findExportDeclaration(importedFile, 'default', typeChecker); + console.log(importName, Object.keys(knownTokenImportsAndModules)); + if (exportInfo) { const { declaration, sourceFile: declarationFile } = exportInfo; // Extract the value from the declaration const valueInfo = extractValueFromDeclaration(declaration, typeChecker); + const knownTokenKeys = Object.keys(knownTokenImportsAndModules); - if (valueInfo) { + if ( + (knownTokenKeys.includes(importName) && knownTokenImportsAndModules[importName].includes(moduleSpecifier)) || + knownTokenImportsAndModules.default.includes(moduleSpecifier) + ) { + importedValues.set(importName, { + value: importName, + sourceFile: declarationFile.getFilePath(), + isLiteral: false, + node: declaration, + knownTokenPackage: true, + }); + } else if (valueInfo) { importedValues.set(importName, { value: valueInfo.value, sourceFile: declarationFile.getFilePath(), isLiteral: valueInfo.isLiteral, templateSpans: valueInfo.templateSpans, node: declaration, + knownTokenPackage: false, }); log(`Added default import: ${importName} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index 
f9d4f305a..4733e2430 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -37,8 +37,6 @@ interface TokenResolverInfo { export const resolveToken = (info: TokenResolverInfo): TokenReference[] => { const { node, tokens } = info; - console.log(info.node.getKindName()); - if (Node.isStringLiteral(node)) { // Path in the event we need to process string literals, however this isn't used given tokens are stored as // initialized values and imports. Generally, as property accessors or identifiers @@ -110,7 +108,6 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ // Then check if it's an imported value reference if (importedValues && importedValues.has(text)) { - // const importTokens = processImportedStringTokens(importedValues, path[path.length - 1] ?? parentName, text, path); const importTokens = processImportedStringTokens(info, text); returnTokens = addTokenToArray(importTokens, returnTokens, isVariableReference, sourceFile); } diff --git a/packages/token-analyzer/src/types.ts b/packages/token-analyzer/src/types.ts index f17ee7fd6..57baaca17 100644 --- a/packages/token-analyzer/src/types.ts +++ b/packages/token-analyzer/src/types.ts @@ -52,3 +52,17 @@ export const IGNORED_DIRS = ['node_modules', 'dist', 'build', '.git']; export const VALID_EXTENSIONS = ['.ts', '.tsx', '.js', '.jsx', '.mjs']; export type TokenMap = Map; + +/** + * This type houses the known named token imports ex: `tokens` and the modules they are imported from. + */ +export type KnownTokenImportsAndModules = { + [key: string]: string[]; +}; + +export const knownTokenImportsAndModules: KnownTokenImportsAndModules = { + // if we see any imports from the defaults, we assume it's a token. + default: ['@fluentui/semantic-tokens'], + // begin the known token imports + tokens: ['@fluentui/react-theme', '@fluentui/react-components', '@fluentui/tokens'], +}; From ea3caeab47e220fcb332a08404a90881de27b91a Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 29 Apr 2025 15:12:14 -0700 Subject: [PATCH 41/75] mark tokens based on imports and symbol mapping --- .../token-analyzer/src/__tests__/e2e.test.ts | 39 ++++------ .../src/__tests__/test-files/analysis.json | 2 +- .../test-files/useButtonStyles.styles.ts | 36 ++-------- packages/token-analyzer/src/astAnalyzer.ts | 11 +-- packages/token-analyzer/src/importAnalyzer.ts | 52 +++++++++----- packages/token-analyzer/src/tokenResolver.ts | 72 ++++++++----------- packages/token-analyzer/src/tokenUtils.ts | 37 ++++++++-- packages/token-analyzer/src/types.ts | 17 ++++- 8 files changed, 139 insertions(+), 127 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index 2ab07fc18..8155d2101 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -19,12 +19,10 @@ describe('e2e test', () => { targetPath = path.join(__dirname, 'test-files', 'analysis.json'); await analyzeProjectStyles(tempDir, targetPath); - await fs - .readFile(path.join(tempDir, 'analysis.json'), 'utf-8') - .then((analysisData) => { - // Parse the JSON data from our analysis and start validating it - analysis = JSON.parse(analysisData); - }); + await fs.readFile(path.join(tempDir, 'analysis.json'), 'utf-8').then((analysisData) => { + // Parse the JSON data from our analysis and start validating it + analysis = JSON.parse(analysisData); + }); styles = analysis[styleFileName].styles; }, 100000); @@ -53,10 +51,7 @@ 
describe('e2e test', () => { */ const tokenTestFactory = (tokenArray: any) => { return (propertyName: string, expectedToken: string) => { - const token = tokenArray.some( - (t: any) => - t.property === propertyName && t.token.includes(expectedToken) - ); + const token = tokenArray.some((t: any) => t.property === propertyName && t.token.includes(expectedToken)); expect(token).toBeTruthy(); }; }; @@ -67,12 +62,9 @@ describe('e2e test', () => { * @param testArray the known set of tokens are we looking for */ const checkTokens = (tokenArray: () => any[], testArray: any[]) => { - test.each(testArray)( - '%s token is properly configured', - (propertyName, expectedToken) => { - tokenTestFactory(tokenArray())(propertyName, expectedToken); - } - ); + test.each(testArray)('%s token is properly configured', (propertyName, expectedToken) => { + tokenTestFactory(tokenArray())(propertyName, expectedToken); + }); // Check if the length of the token array matches the expected length test(`token array length should be ${testArray.length}`, () => { @@ -93,9 +85,7 @@ describe('e2e test', () => { // Define token cases for active hover makeResetStyles tests checkTokens( - () => - styles.useRootBaseClassName.resetStyles.nested["':hover:active'"] - .tokens, + () => styles.useRootBaseClassName.resetStyles.nested["':hover:active'"].tokens, [ ['backgroundColor', 'tokens.colorNeutralBackground1Pressed'], ['borderColor', 'tokens.colorNeutralStroke1Pressed'], @@ -108,7 +98,7 @@ describe('e2e test', () => { () => styles.useRootBaseClassName.resetStyles.tokens, [ ['backgroundColor', 'tokens.colorNeutralBackground1'], - ['color', 'tokens.colorNeutralForeground1'], + ['color', 'semanticTokens.colorNeutralForeground1'], ['border', 'tokens.strokeWidthThin'], ['border', 'tokens.colorNeutralStroke1'], ['fontFamily', 'tokens.fontFamilyBase'], @@ -138,9 +128,7 @@ describe('e2e test', () => { // Token cases for makeResetStyles mozilla bug checkTokens( () => - styles.useRootBaseClassName.resetStyles.nested[ - "'@supports (-moz-appearance:button)'" - ].nested[':focus'].tokens, + styles.useRootBaseClassName.resetStyles.nested["'@supports (-moz-appearance:button)'"].nested[':focus'].tokens, [ ['boxShadow', 'tokens.colorStrokeFocus2'], ['boxShadow', 'tokens.strokeWidthThin'], @@ -149,9 +137,6 @@ describe('e2e test', () => { }); describe('validate makeStyles tokens', () => { - checkTokens( - () => styles.useRootStyles.outline.tokens, - [['backgroundColor', 'tokens.colorTransparentBackground']] - ); + checkTokens(() => styles.useRootStyles.outline.tokens, [['backgroundColor', 'tokens.colorTransparentBackground']]); }); }); diff --git a/packages/token-analyzer/src/__tests__/test-files/analysis.json b/packages/token-analyzer/src/__tests__/test-files/analysis.json index 7f5cbc27f..c18c87e16 100644 --- a/packages/token-analyzer/src/__tests__/test-files/analysis.json +++ b/packages/token-analyzer/src/__tests__/test-files/analysis.json @@ -11,7 +11,7 @@ }, { "property": "color", - "token": ["tokens.colorNeutralForeground1"], + "token": ["semanticTokens.colorNeutralForeground1"], "path": ["color"] }, { diff --git a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts index 9c4c1a11e..6bd70eacc 100644 --- a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts +++ b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts @@ -1,17 +1,10 @@ -import { - iconFilledClassName, - iconRegularClassName, -} from 
'@fluentui/react-icons'; +import { iconFilledClassName, iconRegularClassName } from '@fluentui/react-icons'; import { createCustomFocusIndicatorStyle } from '@fluentui/react-tabster'; import { tokens } from '@fluentui/react-theme'; -import { - shorthands, - makeStyles, - makeResetStyles, - mergeClasses, -} from '@griffel/react'; +import { shorthands, makeStyles, makeResetStyles, mergeClasses } from '@griffel/react'; import type { SlotClassNames } from '@fluentui/react-utilities'; import type { ButtonSlots, ButtonState } from '@fluentui/react-components'; +import * as semanticTokens from '@fluentui/tokens'; export const buttonClassNames: SlotClassNames = { root: 'fui-Button', @@ -46,7 +39,7 @@ const useRootBaseClassName = makeResetStyles({ overflow: 'hidden', backgroundColor: tokens.colorNeutralBackground1, - color: tokens.colorNeutralForeground1, + color: semanticTokens.colorNeutralForeground1, border: `${tokens.strokeWidthThin} solid ${tokens.colorNeutralStroke1}`, fontFamily: tokens.fontFamilyBase, @@ -473,11 +466,7 @@ const useRootFocusStyles = makeStyles({ primary: { ...createCustomFocusIndicatorStyle({ // added another color here to test the shorthands output. - ...shorthands.borderColor( - tokens.colorStrokeFocus2, - tokens.colorStrokeFocus1, - tokenInInitializer - ), + ...shorthands.borderColor(tokens.colorStrokeFocus2, tokens.colorStrokeFocus1, tokenInInitializer), boxShadow: `${tokens.shadow2}, 0 0 0 ${tokens.strokeWidthThin} ${tokens.colorStrokeFocus2} inset, 0 0 0 ${tokens.strokeWidthThick} ${tokens.colorNeutralForegroundOnBrand} inset`, ':hover': { boxShadow: `${tokens.shadow2}, 0 0 0 ${tokens.strokeWidthThin} ${tokens.colorStrokeFocus2} inset`, @@ -571,16 +560,7 @@ export const useButtonStyles_unstable = (state: ButtonState): ButtonState => { const rootIconOnlyStyles = useRootIconOnlyStyles(); const iconStyles = useIconStyles(); - const { - appearance, - disabled, - disabledFocusable, - icon, - iconOnly, - iconPosition, - shape, - size, - } = state; + const { appearance, disabled, disabledFocusable, icon, iconOnly, iconPosition, shape, size } = state; state.root.className = mergeClasses( buttonClassNames.root, @@ -596,9 +576,7 @@ export const useButtonStyles_unstable = (state: ButtonState): ButtonState => { // Disabled styles (disabled || disabledFocusable) && rootDisabledStyles.base, (disabled || disabledFocusable) && rootDisabledStyles.highContrast, - appearance && - (disabled || disabledFocusable) && - rootDisabledStyles[appearance], + appearance && (disabled || disabledFocusable) && rootDisabledStyles[appearance], // Focus styles appearance === 'primary' && rootFocusStyles.primary, diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index 81e9ed567..d72275163 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -37,6 +37,7 @@ interface VariableMapping { function processStyleProperty( prop: PropertyAssignment | SpreadAssignment, importedValues: Map, + project: Project, isResetStyles?: boolean ): TokenReference[] { let tokens: TokenReference[] = []; @@ -51,6 +52,7 @@ function processStyleProperty( parentName, tokens, importedValues, + project, }); return tokens; @@ -223,7 +225,8 @@ function createMetadata(styleMappings: StyleMapping[]): StyleMetadata { */ async function analyzeMakeStyles( sourceFile: SourceFile, - importedValues: Map + importedValues: Map, + project: Project ): Promise { const analysis: StyleAnalysis = {}; @@ -236,7 +239,7 @@ async function 
analyzeMakeStyles( stylesArg.getProperties().forEach((prop) => { if (Node.isPropertyAssignment(prop)) { const styleName = prop.getName(); - const tokens = processStyleProperty(prop, importedValues); + const tokens = processStyleProperty(prop, importedValues, project); const functionName = parentNode.getName(); if (!analysis[functionName]) { analysis[functionName] = {}; @@ -266,7 +269,7 @@ async function analyzeMakeStyles( // Process the styles object stylesArg.getProperties().forEach((prop) => { if (Node.isPropertyAssignment(prop) || Node.isSpreadAssignment(prop)) { - const tokens = processStyleProperty(prop, importedValues, true); + const tokens = processStyleProperty(prop, importedValues, project, true); if (tokens.length) { const styleContent = createStyleContent(tokens); analysis[functionName][makeResetStylesToken].tokens = analysis[functionName][ @@ -335,7 +338,7 @@ async function analyzeFile(filePath: string, project: Project): Promise('analyze makeStyles', () => - analyzeMakeStyles(sourceFile, importedValues) + analyzeMakeStyles(sourceFile, importedValues, project) ); // Create enhanced analysis with separated styles and metadata diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index df23409d6..dbb6c22c7 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -3,7 +3,7 @@ import { Project, Node, SourceFile, ImportDeclaration, Symbol, TypeChecker, Synt import { log } from './debugUtils.js'; import { knownTokenImportsAndModules, TokenReference } from './types.js'; import { getModuleSourceFile } from './moduleResolver.js'; -import { isTokenReference } from './tokenUtils.js'; +import { isTokenReferenceOld } from './tokenUtils.js'; /** * Represents a portion of a template expression @@ -71,8 +71,6 @@ async function processImportDeclaration( // Use our module resolver to get the imported file const importedFile = getModuleSourceFile(project, moduleSpecifier, containingFilePath); - console.log(moduleSpecifier, importedFile !== null); - if (!importedFile) { log(`Could not resolve module: ${moduleSpecifier}`); return; @@ -83,6 +81,8 @@ async function processImportDeclaration( // Process default import (import x from 'module') processDefaultImport(importDecl, importedFile, project, importedValues, typeChecker, moduleSpecifier); + + processNamespaceImport(importDecl, importedFile, project, importedValues, typeChecker, moduleSpecifier); } /** @@ -112,13 +112,7 @@ function processNamedImports( const valueInfo = extractValueFromDeclaration(declaration, typeChecker); const knownTokenKeys = Object.keys(knownTokenImportsAndModules); - console.log( - importName, - knownTokenKeys, - exportInfo !== undefined, - valueInfo !== undefined, - declarationFile.getFilePath() - ); + // We should process the imports module import first to determrine if it's a known token package // If it's not, we can then process it's value as it's likely another file within the application or library. 
if ( @@ -129,7 +123,7 @@ function processNamedImports( value: importName, sourceFile: declarationFile.getFilePath(), isLiteral: false, - node: declaration, + node: namedImport, // Use the alias node if available, otherwise use the declaration knownTokenPackage: true, }); @@ -172,19 +166,13 @@ function processDefaultImport( // Find the default export's true source const exportInfo = findExportDeclaration(importedFile, 'default', typeChecker); - console.log(importName, Object.keys(knownTokenImportsAndModules)); - if (exportInfo) { const { declaration, sourceFile: declarationFile } = exportInfo; // Extract the value from the declaration const valueInfo = extractValueFromDeclaration(declaration, typeChecker); - const knownTokenKeys = Object.keys(knownTokenImportsAndModules); - if ( - (knownTokenKeys.includes(importName) && knownTokenImportsAndModules[importName].includes(moduleSpecifier)) || - knownTokenImportsAndModules.default.includes(moduleSpecifier) - ) { + if (knownTokenImportsAndModules.default.includes(moduleSpecifier)) { importedValues.set(importName, { value: importName, sourceFile: declarationFile.getFilePath(), @@ -207,6 +195,32 @@ function processDefaultImport( } } +function processNamespaceImport( + importDecl: ImportDeclaration, + importedFile: SourceFile, + project: Project, + importedValues: Map, + typeChecker: TypeChecker, + moduleSpecifier: string +): void { + const namespaceImport = importDecl.getNamespaceImport(); + if (!namespaceImport) { + log(`No namespace import found in ${importDecl.getModuleSpecifierValue()}`); + return; + } + const importName = namespaceImport.getText(); + // Find the default export's true source + if (knownTokenImportsAndModules.default.includes(moduleSpecifier)) { + importedValues.set(importName, { + value: importName, + sourceFile: importedFile.getFilePath(), + isLiteral: false, + node: namespaceImport, + knownTokenPackage: true, + }); + } +} + /** * Find an export's original declaration using TypeScript's type checker */ @@ -366,7 +380,7 @@ function extractValueFromExpression( const literal = span.getLiteral().getLiteralText(); // Handle different types of expressions in template spans - if (Node.isPropertyAccessExpression(spanExpr) && isTokenReference(spanExpr)) { + if (Node.isPropertyAccessExpression(spanExpr) && isTokenReferenceOld(spanExpr)) { // Direct token reference in template span templateSpans.push({ text: spanText, diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index 4733e2430..bb4987d9b 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -13,21 +13,17 @@ import { TemplateMiddle, TemplateTail, } from 'ts-morph'; -import { TokenReference } from './types'; +import { TokenReference, TokenResolverInfo } from './types'; import { extractTokensFromCssVars } from './cssVarTokenExtractor'; -import { addTokenToArray, extractTokensFromText, getPropertiesForShorthand, isTokenReference } from './tokenUtils'; +import { + addTokenToArray, + extractTokensFromText, + getPropertiesForShorthand, + isTokenReference, + isTokenReferenceOld, +} from './tokenUtils'; import { ImportedValue } from './importAnalyzer'; -interface TokenResolverInfo { - node: T; - path: string[]; - parentName: string; - tokens: TokenReference[]; - importedValues: Map; - isVariableReference?: boolean; - sourceFile?: string; -} - /** * Function that centarlizes the logic for resolving tokens from a node. 
* Given that this is recursive logic, it's much easier to pass this back to itself. @@ -116,10 +112,17 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ }; const processPropertyAccess = (info: TokenResolverInfo): TokenReference[] => { - const { node, parentName, path, tokens, isVariableReference, sourceFile } = info; + const { node, parentName, path, tokens, isVariableReference, sourceFile, importedValues, project } = info; const text = node.getText(); - const isToken = isTokenReference(text); + + const expression = node.getExpression(); + const expressionText = expression.getText(); + if (expressionText === 'tokens') { + console.log('Checking for semantic tokens!', isTokenReference(info)); + } + + const isToken = isTokenReference(info); if (isToken) { return addTokenToArray( { @@ -136,17 +139,14 @@ const processPropertyAccess = (info: TokenResolverInfo }; const processObjectLiteral = (info: TokenResolverInfo): TokenReference[] => { - const { node, parentName, path, tokens, importedValues } = info; + const { node, tokens } = info; let returnTokens = tokens.slice(); node.getProperties().forEach((childProp) => { returnTokens = returnTokens.concat( resolveToken({ + ...info, node: childProp, - path, - parentName, - tokens, - importedValues, }) ); }); @@ -154,14 +154,11 @@ const processObjectLiteral = (info: TokenResolverInfo): }; const processSpreadAssignment = (info: TokenResolverInfo): TokenReference[] => { - const { node, path, parentName, tokens, importedValues } = info; + const { node, tokens } = info; return tokens.concat( resolveToken({ + ...info, node: node.getExpression(), - path, - parentName, - tokens, - importedValues, }) ); }; @@ -197,6 +194,7 @@ const processFocusCallExpression = (info: TokenResolverInfo): To // We can simplify the logic since we process node types and extract within resolveTokens. We merely need to pass // the updated path return resolveToken({ + ...info, node: passedTokens, path: [...path, nestedModifier], parentName, @@ -209,7 +207,7 @@ const processFocusCallExpression = (info: TokenResolverInfo): To }; const processCallExpression = (info: TokenResolverInfo): TokenReference[] => { - const { node, path, parentName, tokens, importedValues, isVariableReference, sourceFile } = info; + const { node, path, tokens, importedValues, isVariableReference, sourceFile } = info; let returnTokens = tokens.slice(); // Process calls like shorthands.borderColor(tokens.color) @@ -242,9 +240,9 @@ const processCallExpression = (info: TokenResolverInfo): TokenRe node.getArguments().forEach((argument) => { returnTokens = returnTokens.concat( resolveToken({ + ...info, node: argument, path: [...path, functionName], - parentName, tokens: returnTokens, importedValues, }) @@ -301,18 +299,16 @@ const processTemplateExpression = ( }; const processPropertyAssignment = (info: TokenResolverInfo): TokenReference[] => { - const { node, path, parentName, tokens, importedValues } = info; + const { node, path } = info; const childName = node.getName(); const newPath = [...path, childName]; const propertyNode = node.getInitializer(); return resolveToken({ + ...info, node: propertyNode ?? 
node, path: newPath, - parentName, - tokens, - importedValues, }); }; @@ -320,7 +316,7 @@ const processPropertyAssignment = (info: TokenResolverInfo): * Process string tokens in imported values */ export function processImportedStringTokens(info: TokenResolverInfo, value: string): TokenReference[] { - const { node, importedValues, parentName, path, tokens } = info; + const { importedValues, parentName, path, tokens } = info; let returnTokens = tokens.slice(); const propertyName = path[path.length - 1] ?? parentName; @@ -361,11 +357,9 @@ export function processImportedStringTokens(info: TokenResolverInfo, } else { // Run the span back through our resolver returnTokens = resolveToken({ + ...info, node: span.node, - path, - parentName, tokens: returnTokens, - importedValues, isVariableReference: true, sourceFile: importedValue.sourceFile, }); @@ -374,18 +368,16 @@ export function processImportedStringTokens(info: TokenResolverInfo, } else { // Run the span back through our resolver returnTokens = resolveToken({ + ...info, node: importedValue.node, - path, - parentName, tokens: returnTokens, - importedValues, isVariableReference: true, sourceFile: importedValue.sourceFile, }); } } else { // Non-literal values (like property access expressions) - if (isTokenReference(importedValue.value)) { + if (isTokenReferenceOld(importedValue.value)) { returnTokens = addTokenToArray( { property: propertyName, @@ -399,11 +391,9 @@ export function processImportedStringTokens(info: TokenResolverInfo, } else { // Run the span back through our resolver returnTokens = resolveToken({ + ...info, node: importedValue.node, - path, - parentName, tokens: returnTokens, - importedValues, isVariableReference: true, sourceFile: importedValue.sourceFile, }); diff --git a/packages/token-analyzer/src/tokenUtils.ts b/packages/token-analyzer/src/tokenUtils.ts index b08dbdb50..6087cab74 100644 --- a/packages/token-analyzer/src/tokenUtils.ts +++ b/packages/token-analyzer/src/tokenUtils.ts @@ -1,14 +1,41 @@ // tokenUtils.ts -import { Node, Symbol, SyntaxKind } from 'ts-morph'; -import { TOKEN_REGEX, TokenReference } from './types.js'; +import { Symbol, SyntaxKind, Node, ImportSpecifier } from 'ts-morph'; +import { TOKEN_REGEX, TokenReference, TokenResolverInfo } from './types.js'; import { shorthands } from '@griffel/react'; +export function isTokenReference(info: TokenResolverInfo): boolean { + const { node, importedValues, project } = info; + let calledSymbol: Symbol | undefined; + let calledNodeName = node.getText(); + let importedSymbol: Symbol | undefined; + const checker = project.getTypeChecker(); + if (Node.isPropertyAccessExpression(node)) { + const expression = node.getExpression(); + calledNodeName = expression.getText(); + calledSymbol = checker.getSymbolAtLocation(expression); + } else { + calledSymbol = checker.getSymbolAtLocation(node); + } + + const knownTokenValue = importedValues.get(calledNodeName); + if (knownTokenValue) { + const knownTokenNode = knownTokenValue.node; + importedSymbol = checker.getSymbolAtLocation(knownTokenNode); + if (importedSymbol === undefined && Node.isImportSpecifier(knownTokenNode)) { + importedSymbol = checker.getSymbolAtLocation(knownTokenNode.getNameNode()); + } + } + + // If we have a known token that is equal to an imported value and both resolve we know it's a token + return calledSymbol !== undefined && importedSymbol !== undefined && calledSymbol === importedSymbol; +} + /** * Centralizes token detection logic to make future changes easier * @param textOrNode The text or Node 
to check for token references * @returns true if the text/node contains a token reference */ -export function isTokenReference(textOrNode: string | Node | Symbol): boolean { +export function isTokenReferenceOld(textOrNode: string | Node | Symbol): boolean { // If we have a Node or Symbol, extract the text to check let text: string; @@ -61,7 +88,7 @@ export function extractTokensFromText(textOrNode: string | Node | Symbol): strin text = textOrNode; } else if (Node.isNode(textOrNode) && Node.isTemplateExpression(textOrNode)) { textOrNode.getTemplateSpans().forEach((span) => { - if (isTokenReference(span.getExpression().getText())) { + if (isTokenReferenceOld(span.getExpression().getText())) { const token = span.getExpression().getText(); matches.push(token); } else { @@ -131,7 +158,7 @@ export function getPropertiesForShorthand(functionName: string, args: Node[]): { Object.keys(shortHandOutput).forEach((key) => { const value = shortHandOutput[key as keyof typeof shortHandOutput]; - if (isTokenReference(value)) { + if (isTokenReferenceOld(value)) { shortHandTokens.push({ property: key, token: value, diff --git a/packages/token-analyzer/src/types.ts b/packages/token-analyzer/src/types.ts index 57baaca17..e9946309c 100644 --- a/packages/token-analyzer/src/types.ts +++ b/packages/token-analyzer/src/types.ts @@ -1,3 +1,6 @@ +import { Project, Node } from 'ts-morph'; +import { ImportedValue } from './importAnalyzer'; + // types.ts export interface TokenReference { property: string; @@ -62,7 +65,19 @@ export type KnownTokenImportsAndModules = { export const knownTokenImportsAndModules: KnownTokenImportsAndModules = { // if we see any imports from the defaults, we assume it's a token. - default: ['@fluentui/semantic-tokens'], + // @fluentui/tokens is here as a test but should be removed in the future + default: ['@fluentui/semantic-tokens', '@fluentui/tokens'], // begin the known token imports tokens: ['@fluentui/react-theme', '@fluentui/react-components', '@fluentui/tokens'], }; + +export interface TokenResolverInfo { + node: T; + path: string[]; + parentName: string; + tokens: TokenReference[]; + importedValues: Map; + project: Project; + isVariableReference?: boolean; + sourceFile?: string; +} From c195537140b4829df3e78793f4e89fd2729c35e0 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 30 Apr 2025 02:12:56 -0700 Subject: [PATCH 42/75] additional changes optimizations removal of unnecessary code. 
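
This change relies on the symbol-identity check that isTokenReference() in tokenUtils.ts now performs: rather than
matching style values against a token-name regex, the resolver asks the type checker whether the identifier used in a
style value resolves to the same symbol as an import previously recorded by importAnalyzer.ts. A minimal sketch of
that idea follows; it is illustrative only and not part of this change, and the in-memory file names and values
('tokens.ts', 'styles.ts', 'color') are invented for the example.

    // Illustrative sketch only (not part of this commit); the miniature setup is invented.
    import { Project, Node } from 'ts-morph';

    const project = new Project({ useInMemoryFileSystem: true });
    project.createSourceFile('tokens.ts', `export const tokens = { colorNeutralForeground1: 'var(--x)' };`);
    const styles = project.createSourceFile(
      'styles.ts',
      `import { tokens } from './tokens'; export const color = tokens.colorNeutralForeground1;`
    );

    const checker = project.getTypeChecker();

    // Symbol at the use site: the `tokens` identifier inside `tokens.colorNeutralForeground1`.
    const initializer = styles.getVariableDeclarationOrThrow('color').getInitializerOrThrow();
    const usedSymbol = Node.isPropertyAccessExpression(initializer)
      ? checker.getSymbolAtLocation(initializer.getExpression())
      : checker.getSymbolAtLocation(initializer);

    // Symbol of the named import itself (roughly what importAnalyzer.ts records in importedValues).
    const importSpecifier = styles.getImportDeclarations()[0].getNamedImports()[0];
    const importedSymbol = checker.getSymbolAtLocation(importSpecifier.getNameNode());

    // Both sides resolving to the same symbol marks the style value as a token reference.
    console.log(usedSymbol !== undefined && usedSymbol === importedSymbol);

Comparing symbols instead of names avoids false positives from identifiers that merely look like tokens, and it keeps
the check working across aliases and re-exports.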
--- .../token-analyzer/src/__tests__/e2e.test.ts | 4 +- .../src/__tests__/test-files/import-test.ts | 3 + .../test-files/semantic-tokens/index.ts | 3 + .../semantic-tokens/semantic-tokens.ts | 2 + .../test-files/useButtonStyles.styles.ts | 8 +- packages/token-analyzer/src/tokenResolver.ts | 147 +++--------------- packages/token-analyzer/src/tokenUtils.ts | 8 +- 7 files changed, 41 insertions(+), 134 deletions(-) create mode 100644 packages/token-analyzer/src/__tests__/test-files/import-test.ts create mode 100644 packages/token-analyzer/src/__tests__/test-files/semantic-tokens/index.ts create mode 100644 packages/token-analyzer/src/__tests__/test-files/semantic-tokens/semantic-tokens.ts diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index 8155d2101..124c37b72 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -78,7 +78,7 @@ describe('e2e test', () => { () => styles.useRootBaseClassName.resetStyles.nested["':hover'"].tokens, [ ['backgroundColor', 'tokens.colorNeutralBackground1Hover'], - ['borderColor', 'tokens.colorNeutralStroke1Hover'], + ['borderColor', 'anotherToken'], ['color', 'tokens.colorNeutralForeground1Hover'], ] ); @@ -101,7 +101,7 @@ describe('e2e test', () => { ['color', 'semanticTokens.colorNeutralForeground1'], ['border', 'tokens.strokeWidthThin'], ['border', 'tokens.colorNeutralStroke1'], - ['fontFamily', 'tokens.fontFamilyBase'], + ['fontFamily', 'someToken'], ['padding', 'tokens.spacingHorizontalM'], ['borderRadius', 'tokens.borderRadiusMedium'], ['fontSize', 'tokens.fontSizeBase300'], diff --git a/packages/token-analyzer/src/__tests__/test-files/import-test.ts b/packages/token-analyzer/src/__tests__/test-files/import-test.ts new file mode 100644 index 000000000..00525f076 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/test-files/import-test.ts @@ -0,0 +1,3 @@ +import { anotherToken } from '@fluentui/semantic-tokens'; + +export const importTest = anotherToken; diff --git a/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/index.ts b/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/index.ts new file mode 100644 index 000000000..58bb5be6f --- /dev/null +++ b/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/index.ts @@ -0,0 +1,3 @@ +export { someToken, anotherToken } from './semantic-tokens'; + +export const colorNeutralForeground1 = '--color-neutral-foreground-1'; diff --git a/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/semantic-tokens.ts b/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/semantic-tokens.ts new file mode 100644 index 000000000..6874e0204 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/semantic-tokens.ts @@ -0,0 +1,2 @@ +export const someToken = ''; +export const anotherToken = ''; diff --git a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts index 6bd70eacc..86ea57c82 100644 --- a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts +++ b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts @@ -4,7 +4,9 @@ import { tokens } from '@fluentui/react-theme'; import { shorthands, makeStyles, makeResetStyles, mergeClasses } from '@griffel/react'; import type { SlotClassNames } from '@fluentui/react-utilities'; import type { ButtonSlots, ButtonState } from 
'@fluentui/react-components'; -import * as semanticTokens from '@fluentui/tokens'; +import * as semanticTokens from '@fluentui/semantic-tokens'; +import { someToken } from '@fluentui/semantic-tokens'; +import { importTest } from './import-test'; export const buttonClassNames: SlotClassNames = { root: 'fui-Button', @@ -42,12 +44,12 @@ const useRootBaseClassName = makeResetStyles({ color: semanticTokens.colorNeutralForeground1, border: `${tokens.strokeWidthThin} solid ${tokens.colorNeutralStroke1}`, - fontFamily: tokens.fontFamilyBase, + fontFamily: someToken, outlineStyle: 'none', ':hover': { backgroundColor: tokens.colorNeutralBackground1Hover, - borderColor: tokens.colorNeutralStroke1Hover, + borderColor: importTest, color: tokens.colorNeutralForeground1Hover, cursor: 'pointer', diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index bb4987d9b..76f520204 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -18,6 +18,7 @@ import { extractTokensFromCssVars } from './cssVarTokenExtractor'; import { addTokenToArray, extractTokensFromText, + getInitializerFromIdentifier, getPropertiesForShorthand, isTokenReference, isTokenReferenceOld, @@ -84,44 +85,35 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ let returnTokens = tokens.slice(); const text = node.getText(); - - // First check if it matches the token regex directly - const matches = extractTokensFromText(node); - if (matches.length > 0) { - matches.forEach((match) => { - returnTokens = addTokenToArray( - { - property: path[path.length - 1] ?? parentName, - token: [match], - path, - }, - returnTokens, - isVariableReference, - sourceFile - ); - }); - } - - // Then check if it's an imported value reference - if (importedValues && importedValues.has(text)) { - const importTokens = processImportedStringTokens(info, text); - returnTokens = addTokenToArray(importTokens, returnTokens, isVariableReference, sourceFile); + console.log('Processing identifier', text, getInitializerFromIdentifier(node)); + // knownTokenPackage is set to false for our importTest + if (isTokenReference(info)) { + console.log('Found a token reference', text); + // Found a token, we should process and return it + const propertyName = path[path.length - 1] ?? parentName; + returnTokens = addTokenToArray( + { + property: propertyName, + token: [text], + path, + }, + returnTokens, + isVariableReference, + sourceFile + ); + } else if (getInitializerFromIdentifier(node)) { + // we have a variable declaration and we should then check if the value is a token as well. 
Reprocess the node + console.log(getInitializerFromIdentifier(node)?.getText()); + // return } return returnTokens; }; const processPropertyAccess = (info: TokenResolverInfo): TokenReference[] => { - const { node, parentName, path, tokens, isVariableReference, sourceFile, importedValues, project } = info; + const { node, parentName, path, tokens, isVariableReference, sourceFile } = info; const text = node.getText(); - - const expression = node.getExpression(); - const expressionText = expression.getText(); - if (expressionText === 'tokens') { - console.log('Checking for semantic tokens!', isTokenReference(info)); - } - const isToken = isTokenReference(info); if (isToken) { return addTokenToArray( @@ -311,98 +303,3 @@ const processPropertyAssignment = (info: TokenResolverInfo): path: newPath, }); }; - -/** - * Process string tokens in imported values - */ -export function processImportedStringTokens(info: TokenResolverInfo, value: string): TokenReference[] { - const { importedValues, parentName, path, tokens } = info; - let returnTokens = tokens.slice(); - const propertyName = path[path.length - 1] ?? parentName; - - // Check if the value is an imported value reference - if (importedValues.has(value)) { - // Cast to ImportedValue as we know the value exists - const importedValue = importedValues.get(value) as ImportedValue; - - // If we've already pre-resolved tokens for this value, use them - if (importedValue.resolvedTokens) { - return importedValue.resolvedTokens.map((token) => ({ - ...token, - property: propertyName, // Update property name for current context - path: path, // Update path for current context - })); - } - - if (importedValue.isLiteral) { - if (importedValue.templateSpans) { - // Process template spans specially - for (const span of importedValue.templateSpans) { - if (span.isToken) { - // Direct token reference in span - returnTokens = addTokenToArray( - { - property: propertyName, - token: [span.text], - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }, - returnTokens - ); - } else if (span.isReference && span.referenceName && importedValues.has(span.referenceName)) { - // Reference to another imported value - process recursively - const spanTokens = processImportedStringTokens(info, span.referenceName); - returnTokens.push(...spanTokens); - } else { - // Run the span back through our resolver - returnTokens = resolveToken({ - ...info, - node: span.node, - tokens: returnTokens, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); - } - } - } else { - // Run the span back through our resolver - returnTokens = resolveToken({ - ...info, - node: importedValue.node, - tokens: returnTokens, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); - } - } else { - // Non-literal values (like property access expressions) - if (isTokenReferenceOld(importedValue.value)) { - returnTokens = addTokenToArray( - { - property: propertyName, - token: [importedValue.value], - path, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }, - returnTokens - ); - } else { - // Run the span back through our resolver - returnTokens = resolveToken({ - ...info, - node: importedValue.node, - tokens: returnTokens, - isVariableReference: true, - sourceFile: importedValue.sourceFile, - }); - } - } - - // Cache the resolved tokens for future use - importedValue.resolvedTokens = returnTokens.map((token) => ({ ...token })); - } - - return returnTokens; -} diff --git a/packages/token-analyzer/src/tokenUtils.ts 
b/packages/token-analyzer/src/tokenUtils.ts index 6087cab74..30b83c086 100644 --- a/packages/token-analyzer/src/tokenUtils.ts +++ b/packages/token-analyzer/src/tokenUtils.ts @@ -1,5 +1,5 @@ // tokenUtils.ts -import { Symbol, SyntaxKind, Node, ImportSpecifier } from 'ts-morph'; +import { Symbol, SyntaxKind, Node } from 'ts-morph'; import { TOKEN_REGEX, TokenReference, TokenResolverInfo } from './types.js'; import { shorthands } from '@griffel/react'; @@ -61,7 +61,7 @@ export function isTokenReferenceOld(textOrNode: string | Node | Symbol): boolean return test; } -export function getExpresionFromIdentifier(node: Node): Node | undefined { +export function getInitializerFromIdentifier(node: Node): Node | undefined { const nodeSymbol = node.getSymbol(); const nodeDeclarations = nodeSymbol?.getDeclarations(); if (nodeSymbol && nodeDeclarations && nodeDeclarations.length > 0) { @@ -94,7 +94,7 @@ export function extractTokensFromText(textOrNode: string | Node | Symbol): strin } else { const spanExpression = span.getExpression(); if (spanExpression.getKind() === SyntaxKind.Identifier) { - const spanInitializer = getExpresionFromIdentifier(spanExpression); + const spanInitializer = getInitializerFromIdentifier(spanExpression); if (spanInitializer) { matches.push(...extractTokensFromText(spanInitializer)); } @@ -104,7 +104,7 @@ export function extractTokensFromText(textOrNode: string | Node | Symbol): strin } else if (Node.isNode(textOrNode)) { // If we have an identifier, we need to check if it has an initializer. From there we should reprocess to extract tokens if (Node.isIdentifier(textOrNode)) { - const initializer = getExpresionFromIdentifier(textOrNode); + const initializer = getInitializerFromIdentifier(textOrNode); if (initializer) { matches.push(...extractTokensFromText(initializer)); } From a5eafead29346305b1621739448d1f31b7011888 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 30 Apr 2025 15:14:25 -0700 Subject: [PATCH 43/75] additional fixes --- .../__tests__/test-files/useButtonStyles.styles.ts | 3 ++- packages/token-analyzer/src/tokenResolver.ts | 12 ++++-------- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts index 86ea57c82..cab0fa7f5 100644 --- a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts +++ b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts @@ -16,6 +16,7 @@ export const buttonClassNames: SlotClassNames = { const iconSpacingVar = '--fui-Button__icon--spacing'; const tokenInInitializer = tokens.borderRadiusCircular; +const tokenInInitializer2 = tokens.colorNeutralBackground1; const buttonSpacingSmall = '3px'; const buttonSpacingSmallWithIcon = '1px'; @@ -40,7 +41,7 @@ const useRootBaseClassName = makeResetStyles({ margin: 0, overflow: 'hidden', - backgroundColor: tokens.colorNeutralBackground1, + backgroundColor: tokenInInitializer2, color: semanticTokens.colorNeutralForeground1, border: `${tokens.strokeWidthThin} solid ${tokens.colorNeutralStroke1}`, diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index 76f520204..052157f60 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -21,9 +21,7 @@ import { getInitializerFromIdentifier, getPropertiesForShorthand, isTokenReference, - isTokenReferenceOld, } from './tokenUtils'; -import { 
ImportedValue } from './importAnalyzer'; /** * Function that centarlizes the logic for resolving tokens from a node. @@ -80,15 +78,14 @@ const processStringLiteral = (info: TokenResolverInfo): TokenRefe }; const processIdentifier = (info: TokenResolverInfo): TokenReference[] => { - const { node, importedValues, parentName, path, tokens, isVariableReference, sourceFile } = info; + const { node, parentName, path, tokens, isVariableReference, sourceFile } = info; let returnTokens = tokens.slice(); const text = node.getText(); - console.log('Processing identifier', text, getInitializerFromIdentifier(node)); + const intializerNode = getInitializerFromIdentifier(node); // knownTokenPackage is set to false for our importTest if (isTokenReference(info)) { - console.log('Found a token reference', text); // Found a token, we should process and return it const propertyName = path[path.length - 1] ?? parentName; returnTokens = addTokenToArray( @@ -101,10 +98,9 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ isVariableReference, sourceFile ); - } else if (getInitializerFromIdentifier(node)) { + } else if (intializerNode) { // we have a variable declaration and we should then check if the value is a token as well. Reprocess the node - console.log(getInitializerFromIdentifier(node)?.getText()); - // return + returnTokens = returnTokens.concat(resolveToken({ ...info, node: intializerNode })); } return returnTokens; From c5669bfb3f3ff9c8b5ff25e8c03b35b2430c64c5 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 2 May 2025 01:33:24 -0700 Subject: [PATCH 44/75] hoist module check create getModuleSpecifierFromExportSymbol --- packages/token-analyzer/src/importAnalyzer.ts | 178 +++++++++++------- 1 file changed, 115 insertions(+), 63 deletions(-) diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index dbb6c22c7..908491fbb 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -100,45 +100,44 @@ function processNamedImports( const importName = namedImport.getName(); const alias = namedImport.getAliasNode()?.getText() || importName; - // Find the export's true source using TypeScript's type checker - const exportInfo = findExportDeclaration(importedFile, importName, typeChecker); - - if (exportInfo) { - const { declaration, sourceFile: declarationFile } = exportInfo; - - // We need to first check if the import is coming from a known token package - - // Extract the value from the declaration - const valueInfo = extractValueFromDeclaration(declaration, typeChecker); - - const knownTokenKeys = Object.keys(knownTokenImportsAndModules); - - // We should process the imports module import first to determrine if it's a known token package - // If it's not, we can then process it's value as it's likely another file within the application or library. - if ( - (knownTokenKeys.includes(importName) && knownTokenImportsAndModules[importName].includes(moduleSpecifier)) || - knownTokenImportsAndModules.default.includes(moduleSpecifier) - ) { - importedValues.set(alias, { - value: importName, - sourceFile: declarationFile.getFilePath(), - isLiteral: false, - node: namedImport, // Use the alias node if available, otherwise use the declaration - knownTokenPackage: true, - }); + // We should process the imports module import first to determine if it's a known token package + // If it's not, we can then process it's value as it's likely another file within the application or library. 
+ if (isKnownTokenPackage(moduleSpecifier, importName)) { + importedValues.set(alias, { + value: importName, + sourceFile: importedFile.getFilePath(), + isLiteral: false, + node: namedImport, // Use the alias node if available, otherwise use the declaration + knownTokenPackage: true, + }); - log(`Added known token import: ${alias} = ${importName} from ${declarationFile.getFilePath()}`); - } else if (valueInfo) { - importedValues.set(alias, { - value: valueInfo.value, - sourceFile: declarationFile.getFilePath(), - isLiteral: valueInfo.isLiteral, - templateSpans: valueInfo.templateSpans, - node: declaration, - knownTokenPackage: false, - }); + log(`Added known token import: ${alias} = ${importName} from ${importedFile.getFilePath()}`); + } else { + // Find the export's true source using TypeScript's type checker + const exportInfo = findExportDeclaration(importedFile, importName, typeChecker); + + if (exportInfo) { + const { declaration, sourceFile: declarationFile } = exportInfo; + + // We need to first check if the import is coming from a known token package + + // Extract the value from the declaration + const valueInfo = extractValueFromDeclaration(declaration, typeChecker); + if (valueInfo) { + // We don't have a direct known token import, so process where the value is declared and determine if that's a + // known token package or not. If not, we can omit the value. + + importedValues.set(alias, { + value: valueInfo.value, + sourceFile: declarationFile.getFilePath(), + isLiteral: valueInfo.isLiteral, + templateSpans: valueInfo.templateSpans, + node: declaration, + knownTokenPackage: false, + }); - log(`Added imported value: ${alias} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); + log(`Added imported value: ${alias} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); + } } } } @@ -163,34 +162,35 @@ function processDefaultImport( const importName = defaultImport.getText(); - // Find the default export's true source - const exportInfo = findExportDeclaration(importedFile, 'default', typeChecker); - - if (exportInfo) { - const { declaration, sourceFile: declarationFile } = exportInfo; + if (isKnownTokenPackage(moduleSpecifier)) { + importedValues.set(importName, { + value: importName, + sourceFile: importedFile.getFilePath(), + isLiteral: false, + node: importDecl, + knownTokenPackage: true, + }); + } else { + // Find the default export's true source + const exportInfo = findExportDeclaration(importedFile, 'default', typeChecker); - // Extract the value from the declaration - const valueInfo = extractValueFromDeclaration(declaration, typeChecker); + if (exportInfo) { + const { declaration, sourceFile: declarationFile } = exportInfo; - if (knownTokenImportsAndModules.default.includes(moduleSpecifier)) { - importedValues.set(importName, { - value: importName, - sourceFile: declarationFile.getFilePath(), - isLiteral: false, - node: declaration, - knownTokenPackage: true, - }); - } else if (valueInfo) { - importedValues.set(importName, { - value: valueInfo.value, - sourceFile: declarationFile.getFilePath(), - isLiteral: valueInfo.isLiteral, - templateSpans: valueInfo.templateSpans, - node: declaration, - knownTokenPackage: false, - }); + // Extract the value from the declaration + const valueInfo = extractValueFromDeclaration(declaration, typeChecker); + if (valueInfo) { + importedValues.set(importName, { + value: valueInfo.value, + sourceFile: declarationFile.getFilePath(), + isLiteral: valueInfo.isLiteral, + templateSpans: valueInfo.templateSpans, + node: 
declaration, + knownTokenPackage: false, + }); - log(`Added default import: ${importName} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); + log(`Added default import: ${importName} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); + } } } } @@ -208,9 +208,13 @@ function processNamespaceImport( log(`No namespace import found in ${importDecl.getModuleSpecifierValue()}`); return; } + + // We need to resolve any re-exports to find the true source of the namespace import just as we do with the + // other import types. + const importName = namespaceImport.getText(); // Find the default export's true source - if (knownTokenImportsAndModules.default.includes(moduleSpecifier)) { + if (isKnownTokenPackage(moduleSpecifier)) { importedValues.set(importName, { value: importName, sourceFile: importedFile.getFilePath(), @@ -221,6 +225,40 @@ function processNamespaceImport( } } +function getModuleSpecifierFromExportSymbol(symbol: Symbol): { + moduleSpecifier: string | undefined; + sourceFile: SourceFile | undefined; + declaration: Node | undefined; +} { + let moduleSpecifier: string | undefined; + let sourceFile: SourceFile | undefined; + let declaration: Node | undefined; + symbol.getDeclarations().forEach((declaration) => { + // Walk the tree until we find an ExportDeclaration + let currentDeclaration: Node | undefined = declaration; + while (Node.isExportSpecifier(currentDeclaration) || Node.isNamedExports(currentDeclaration)) { + currentDeclaration = currentDeclaration.getParent(); + } + + if (Node.isExportDeclaration(currentDeclaration)) { + moduleSpecifier = currentDeclaration.getModuleSpecifierValue(); + sourceFile = currentDeclaration.getSourceFile(); + declaration = currentDeclaration; + } + }); + return { moduleSpecifier, sourceFile, declaration }; +} + +function isKnownTokenPackage(moduleSpecifier: string, valueName?: string): boolean { + const knownTokenKeys = Object.keys(knownTokenImportsAndModules); + return ( + (valueName !== undefined && + knownTokenKeys.includes(valueName) && + knownTokenImportsAndModules[valueName].includes(moduleSpecifier)) || + knownTokenImportsAndModules.default.includes(moduleSpecifier) + ); +} + /** * Find an export's original declaration using TypeScript's type checker */ @@ -251,6 +289,20 @@ function findExportDeclaration( return undefined; } + // Get the module specifier for this export + const { + moduleSpecifier, + sourceFile: moduleSourceFile, + declaration: moduleDeclaration, + } = getModuleSpecifierFromExportSymbol(exportSymbol); + + let isTokenModule = false; + if (moduleSpecifier) { + isTokenModule = isKnownTokenPackage(moduleSpecifier, exportSymbol.getName()); + } + + console.log(`Module specifier for ${exportName}: ${moduleSpecifier}`); + // If this is an alias (re-export), get the original symbol let resolvedSymbol: Symbol = exportSymbol; if (exportSymbol.isAlias()) { From 8f7c9601bf1e92d1dd64ebc93f179843e35eaec1 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Mon, 5 May 2025 17:45:38 -0700 Subject: [PATCH 45/75] add additional tests for nested import/export types update test code update import analyzer to be more robust --- .../token-analyzer/src/__tests__/e2e.test.ts | 2 +- .../src/__tests__/test-files/import-test.ts | 1 + .../__tests__/test-files/more-import-test.ts | 2 + .../test-files/useButtonStyles.styles.ts | 4 +- packages/token-analyzer/src/importAnalyzer.ts | 386 +++++------------- 5 files changed, 100 insertions(+), 295 deletions(-) create mode 100644 
packages/token-analyzer/src/__tests__/test-files/more-import-test.ts diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index 124c37b72..6fd93ffa6 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -77,7 +77,7 @@ describe('e2e test', () => { checkTokens( () => styles.useRootBaseClassName.resetStyles.nested["':hover'"].tokens, [ - ['backgroundColor', 'tokens.colorNeutralBackground1Hover'], + ['backgroundColor', 'colorNeutralForeground1'], ['borderColor', 'anotherToken'], ['color', 'tokens.colorNeutralForeground1Hover'], ] diff --git a/packages/token-analyzer/src/__tests__/test-files/import-test.ts b/packages/token-analyzer/src/__tests__/test-files/import-test.ts index 00525f076..f02ac3e48 100644 --- a/packages/token-analyzer/src/__tests__/test-files/import-test.ts +++ b/packages/token-analyzer/src/__tests__/test-files/import-test.ts @@ -1,3 +1,4 @@ import { anotherToken } from '@fluentui/semantic-tokens'; +export { colorNeutralForeground1 } from './more-import-test'; export const importTest = anotherToken; diff --git a/packages/token-analyzer/src/__tests__/test-files/more-import-test.ts b/packages/token-analyzer/src/__tests__/test-files/more-import-test.ts new file mode 100644 index 000000000..ae77eee44 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/test-files/more-import-test.ts @@ -0,0 +1,2 @@ +// test direct export from the semantic tokens package +export { colorNeutralForeground1 } from '@fluentui/semantic-tokens'; diff --git a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts index cab0fa7f5..c011994d8 100644 --- a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts +++ b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts @@ -6,7 +6,7 @@ import type { SlotClassNames } from '@fluentui/react-utilities'; import type { ButtonSlots, ButtonState } from '@fluentui/react-components'; import * as semanticTokens from '@fluentui/semantic-tokens'; import { someToken } from '@fluentui/semantic-tokens'; -import { importTest } from './import-test'; +import { importTest, colorNeutralForeground1 } from './import-test'; export const buttonClassNames: SlotClassNames = { root: 'fui-Button', @@ -49,7 +49,7 @@ const useRootBaseClassName = makeResetStyles({ outlineStyle: 'none', ':hover': { - backgroundColor: tokens.colorNeutralBackground1Hover, + backgroundColor: colorNeutralForeground1, borderColor: importTest, color: tokens.colorNeutralForeground1Hover, diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index 908491fbb..d1408116c 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -1,34 +1,16 @@ // importAnalyzer.ts -import { Project, Node, SourceFile, ImportDeclaration, Symbol, TypeChecker, SyntaxKind } from 'ts-morph'; +import { Project, Node, SourceFile, ImportDeclaration, Symbol, TypeChecker } from 'ts-morph'; import { log } from './debugUtils.js'; -import { knownTokenImportsAndModules, TokenReference } from './types.js'; +import { knownTokenImportsAndModules } from './types.js'; import { getModuleSourceFile } from './moduleResolver.js'; -import { isTokenReferenceOld } from './tokenUtils.js'; - -/** - * Represents a portion of a template expression - */ -interface TemplateSpan { - text: string; 
// The actual text content - isToken: boolean; // Whether this span is a token reference - isReference: boolean; // Whether this span is a reference to another variable - referenceName?: string; // The name of the referenced variable if isReference is true - node: Node; -} /** * Represents a value imported from another module */ export interface ImportedValue { value: string; - sourceFile: string; - isLiteral: boolean; node: Node; knownTokenPackage: boolean; - - // Enhanced fields for template processing - templateSpans?: TemplateSpan[]; // For template expressions with spans - resolvedTokens?: TokenReference[]; // Pre-extracted tokens from this value } /** @@ -97,6 +79,9 @@ function processNamedImports( moduleSpecifier: string ): void { for (const namedImport of importDecl.getNamedImports()) { + // We want to keep the node reference the same as the original import so when we do equality checks we can ensure + // we're going to get a valid result. If we moved to use a nested value we'd never get a true result across files. + const nameOrAliasNode = namedImport.getAliasNode() ?? namedImport; const importName = namedImport.getName(); const alias = namedImport.getAliasNode()?.getText() || importName; @@ -105,9 +90,7 @@ function processNamedImports( if (isKnownTokenPackage(moduleSpecifier, importName)) { importedValues.set(alias, { value: importName, - sourceFile: importedFile.getFilePath(), - isLiteral: false, - node: namedImport, // Use the alias node if available, otherwise use the declaration + node: nameOrAliasNode, // Use the alias node if available, otherwise use the declaration knownTokenPackage: true, }); @@ -117,23 +100,19 @@ function processNamedImports( const exportInfo = findExportDeclaration(importedFile, importName, typeChecker); if (exportInfo) { - const { declaration, sourceFile: declarationFile } = exportInfo; - + const { declaration, sourceFile: declarationFile, moduleSpecifier: exportModuleSpecifier } = exportInfo; // We need to first check if the import is coming from a known token package // Extract the value from the declaration - const valueInfo = extractValueFromDeclaration(declaration, typeChecker); + const valueInfo = extractValueFromDeclaration(declaration); if (valueInfo) { // We don't have a direct known token import, so process where the value is declared and determine if that's a // known token package or not. If not, we can omit the value. 
importedValues.set(alias, { value: valueInfo.value, - sourceFile: declarationFile.getFilePath(), - isLiteral: valueInfo.isLiteral, - templateSpans: valueInfo.templateSpans, - node: declaration, - knownTokenPackage: false, + node: nameOrAliasNode, + knownTokenPackage: isKnownTokenPackage(exportModuleSpecifier, importName), }); log(`Added imported value: ${alias} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); @@ -165,8 +144,6 @@ function processDefaultImport( if (isKnownTokenPackage(moduleSpecifier)) { importedValues.set(importName, { value: importName, - sourceFile: importedFile.getFilePath(), - isLiteral: false, node: importDecl, knownTokenPackage: true, }); @@ -178,13 +155,10 @@ function processDefaultImport( const { declaration, sourceFile: declarationFile } = exportInfo; // Extract the value from the declaration - const valueInfo = extractValueFromDeclaration(declaration, typeChecker); + const valueInfo = extractValueFromDeclaration(declaration); if (valueInfo) { importedValues.set(importName, { value: valueInfo.value, - sourceFile: declarationFile.getFilePath(), - isLiteral: valueInfo.isLiteral, - templateSpans: valueInfo.templateSpans, node: declaration, knownTokenPackage: false, }); @@ -210,15 +184,14 @@ function processNamespaceImport( } // We need to resolve any re-exports to find the true source of the namespace import just as we do with the - // other import types. + // other import types. Mark as TODO to handle this in the future. We don't expect many to deeply nest namespace + // imports but it's possible. We can prioritize if we see this as a common pattern. const importName = namespaceImport.getText(); // Find the default export's true source if (isKnownTokenPackage(moduleSpecifier)) { importedValues.set(importName, { value: importName, - sourceFile: importedFile.getFilePath(), - isLiteral: false, node: namespaceImport, knownTokenPackage: true, }); @@ -234,16 +207,35 @@ function getModuleSpecifierFromExportSymbol(symbol: Symbol): { let sourceFile: SourceFile | undefined; let declaration: Node | undefined; symbol.getDeclarations().forEach((declaration) => { - // Walk the tree until we find an ExportDeclaration - let currentDeclaration: Node | undefined = declaration; - while (Node.isExportSpecifier(currentDeclaration) || Node.isNamedExports(currentDeclaration)) { - currentDeclaration = currentDeclaration.getParent(); - } + if (Node.isVariableDeclaration(declaration)) { + const varSymbol = declaration.getInitializer()?.getSymbol(); + if (varSymbol) { + const varImportSpecifier = varSymbol.getDeclarations().find((varDeclaration) => { + return Node.isImportSpecifier(varDeclaration); + }); + const varImportSymbol = varImportSpecifier?.getSymbol(); + if (varImportSymbol) { + return getModuleSpecifierFromExportSymbol(varImportSymbol); + } + } + } else { + // Walk the tree until we find an ExportDeclaration + let currentDeclaration: Node | undefined = declaration; + while ( + Node.isExportSpecifier(currentDeclaration) || + Node.isNamedExports(currentDeclaration) || + Node.isImportSpecifier(currentDeclaration) || + Node.isNamedImports(currentDeclaration) || + Node.isImportClause(currentDeclaration) + ) { + currentDeclaration = currentDeclaration.getParent(); + } - if (Node.isExportDeclaration(currentDeclaration)) { - moduleSpecifier = currentDeclaration.getModuleSpecifierValue(); - sourceFile = currentDeclaration.getSourceFile(); - declaration = currentDeclaration; + if (Node.isExportDeclaration(currentDeclaration) || Node.isImportDeclaration(currentDeclaration)) { + 
moduleSpecifier = currentDeclaration.getModuleSpecifierValue(); + sourceFile = currentDeclaration.getSourceFile(); + declaration = currentDeclaration; + } } }); return { moduleSpecifier, sourceFile, declaration }; @@ -259,6 +251,46 @@ function isKnownTokenPackage(moduleSpecifier: string, valueName?: string): boole ); } +/** + * Function that walks up the aliases to find the nearest import/export declaration with a known token package + */ +function findNearestKnownTokenInfo( + exportSymbol: Symbol, + typeChecker: TypeChecker +): + | { + knownTokenSymbol: Symbol; + knownTokenModuleSpecifier: string; + knownTokenSourceFile?: SourceFile; + knownTokenDeclaration?: Node; + } + | undefined { + // Get the module specifier if we're an export specifier + const { moduleSpecifier, sourceFile, declaration } = getModuleSpecifierFromExportSymbol(exportSymbol); + + const isAlias = exportSymbol.isAlias(); + if (moduleSpecifier) { + // If this is an alias (re-export), get the original symbol + let resolvedSymbol: Symbol = exportSymbol; + if (isAlias) { + // we're ok type casting here because we know the symbol is an alias from the previous check but TS won't pick up on it + resolvedSymbol = typeChecker.getImmediatelyAliasedSymbol(exportSymbol) as Symbol; + // console.log(`Resolved alias to: ${resolvedSymbol.getName()}`, moduleSpecifier); + if (isKnownTokenPackage(moduleSpecifier, resolvedSymbol.getName())) { + return { + knownTokenSymbol: resolvedSymbol, + knownTokenModuleSpecifier: moduleSpecifier, + knownTokenSourceFile: sourceFile, + knownTokenDeclaration: declaration ?? resolvedSymbol.getValueDeclaration(), + }; + } else { + return findNearestKnownTokenInfo(resolvedSymbol, typeChecker); + } + } + } + return undefined; +} + /** * Find an export's original declaration using TypeScript's type checker */ @@ -266,7 +298,7 @@ function findExportDeclaration( sourceFile: SourceFile, exportName: string, typeChecker: TypeChecker -): { declaration: Node; sourceFile: SourceFile } | undefined { +): { declaration: Node; sourceFile: SourceFile; moduleSpecifier: string } | undefined { try { // Get the source file's symbol (represents the module) const sourceFileSymbol = typeChecker.getSymbolAtLocation(sourceFile); @@ -289,58 +321,15 @@ function findExportDeclaration( return undefined; } - // Get the module specifier for this export - const { - moduleSpecifier, - sourceFile: moduleSourceFile, - declaration: moduleDeclaration, - } = getModuleSpecifierFromExportSymbol(exportSymbol); - - let isTokenModule = false; - if (moduleSpecifier) { - isTokenModule = isKnownTokenPackage(moduleSpecifier, exportSymbol.getName()); - } - - console.log(`Module specifier for ${exportName}: ${moduleSpecifier}`); - - // If this is an alias (re-export), get the original symbol - let resolvedSymbol: Symbol = exportSymbol; - if (exportSymbol.isAlias()) { - // we're ok type casting here because we know the symbol is an alias from the previous check but TS won't pick up on it - resolvedSymbol = typeChecker.getAliasedSymbol(exportSymbol) as Symbol; - log(`Resolved alias to: ${resolvedSymbol.getName()}`); - } - - // Get the value declaration from the resolved symbol - const valueDeclaration = resolvedSymbol.getValueDeclaration(); - if (!valueDeclaration) { - log(`No value declaration found for ${exportName}`); - - // Fallback to any declaration if value declaration is not available - const declarations = resolvedSymbol.getDeclarations(); - if (!declarations || declarations.length === 0) { - log(`No declarations found for ${exportName}`); - return 
undefined; - } - - const declaration = declarations[0]; - const declarationSourceFile = declaration.getSourceFile(); + const tokenInfo = findNearestKnownTokenInfo(exportSymbol, typeChecker); + if (tokenInfo && tokenInfo.knownTokenDeclaration && tokenInfo.knownTokenSourceFile) { return { - declaration, - sourceFile: declarationSourceFile, + declaration: tokenInfo.knownTokenDeclaration, + sourceFile: tokenInfo.knownTokenSourceFile, + moduleSpecifier: tokenInfo.knownTokenModuleSpecifier, }; } - - const declarationSourceFile = valueDeclaration.getSourceFile(); - - log( - `Found declaration for '${exportName}': ${valueDeclaration.getKindName()} in ${declarationSourceFile.getFilePath()}` - ); - return { - declaration: valueDeclaration, - sourceFile: declarationSourceFile, - }; } catch (err) { log(`Error finding export declaration for ${exportName}:`, err); return undefined; @@ -350,204 +339,17 @@ function findExportDeclaration( /** * Extract string value from a declaration node */ -function extractValueFromDeclaration( - declaration: Node, - typeChecker: TypeChecker -): { value: string; isLiteral: boolean; templateSpans?: TemplateSpan[] } | undefined { +function extractValueFromDeclaration(declaration: Node): { value: string } | undefined { // Handle variable declarations if (Node.isVariableDeclaration(declaration)) { - const initializer = declaration.getInitializer(); - return extractValueFromExpression(initializer, typeChecker); - } - // Handle export assignments (export default "value") - if (Node.isExportAssignment(declaration)) { - const expression = declaration.getExpression(); - return extractValueFromExpression(expression, typeChecker); - } - - // Handle named exports (export { x }) - if (Node.isExportSpecifier(declaration)) { - // Find the local symbol this specifier refers to - const name = declaration.getNameNode().getText(); - const sourceFile = declaration.getSourceFile(); - - // Find the local declaration with this name - for (const varDecl of sourceFile.getDescendantsOfKind(SyntaxKind.VariableDeclaration)) { - if (varDecl.getName() === name) { - const initializer = varDecl.getInitializer(); - return extractValueFromExpression(initializer, typeChecker); - } - } + return { value: declaration.getNameNode().getText() }; } - return undefined; -} - -/** - * Extract value from an expression node with enhanced template literal handling - */ -function extractValueFromExpression( - expression: Node | undefined, - typeChecker: TypeChecker -): - | { - value: string; - isLiteral: boolean; - templateSpans?: TemplateSpan[]; - node: Node; - } - | undefined { - if (!expression) { - return undefined; - } - - if (Node.isStringLiteral(expression)) { - return { - value: expression.getLiteralValue(), - isLiteral: true, - node: expression, - }; - } else if (Node.isTemplateExpression(expression)) { - // Process the template head and spans fully - const head = expression.getHead().getLiteralText(); - const spans = expression.getTemplateSpans(); - - let fullValue = head; - const templateSpans: TemplateSpan[] = []; - - // Add head as a non-token span if it's not empty - if (head) { - templateSpans.push({ - text: head, - isToken: false, - isReference: false, - node: expression.getHead(), - }); - } - - // Process each span in the template expression - for (const span of spans) { - const spanExpr = span.getExpression(); - const spanText = spanExpr.getText(); - const literal = span.getLiteral().getLiteralText(); - - // Handle different types of expressions in template spans - if 
(Node.isPropertyAccessExpression(spanExpr) && isTokenReferenceOld(spanExpr)) { - // Direct token reference in template span - templateSpans.push({ - text: spanText, - isToken: true, - isReference: false, - node: spanExpr, - }); - fullValue += spanText; - } else if (Node.isIdentifier(spanExpr)) { - // Potential reference to another variable - templateSpans.push({ - text: spanText, - isToken: false, - isReference: true, - referenceName: spanText, - node: spanExpr, - }); - fullValue += spanText; - } else { - // Other expression types - try to resolve recursively - const resolvedExpr = extractValueFromExpression(spanExpr, typeChecker); - if (resolvedExpr) { - if (resolvedExpr.templateSpans) { - // If it has its own spans, include them - templateSpans.push(...resolvedExpr.templateSpans); - } else { - // Otherwise add the value - templateSpans.push({ - text: resolvedExpr.value, - isToken: false, - isReference: false, - node: resolvedExpr.node, - }); - } - fullValue += resolvedExpr.value; - } else { - // Fallback to the raw text if we can't resolve - templateSpans.push({ - text: spanText, - isToken: false, - isReference: false, - node: spanExpr, - }); - fullValue += spanText; - } - } - - // Add the literal part that follows the expression - if (literal) { - templateSpans.push({ - text: literal, - isToken: false, - isReference: false, - node: span.getLiteral(), - }); - fullValue += literal; - } - } - - return { - value: fullValue, - isLiteral: true, - templateSpans, - node: expression, - }; - } else if (Node.isIdentifier(expression)) { - // Try to resolve the identifier to its value - const symbol = expression.getSymbol(); - if (!symbol) { - return { - value: expression.getText(), - isLiteral: false, - node: expression, - }; - } - - // Get the declaration of this identifier - const decl = symbol.getValueDeclaration() || symbol.getDeclarations()?.[0]; - if (!decl) { - return { - value: expression.getText(), - isLiteral: false, - node: expression, - }; - } - - // If it's a variable declaration, get its initializer - if (Node.isVariableDeclaration(decl)) { - const initializer = decl.getInitializer(); - if (initializer) { - // Recursively resolve the initializer - return extractValueFromExpression(initializer, typeChecker); - } - } - - return { - value: expression.getText(), - isLiteral: false, - node: expression, - }; - } else if (Node.isPropertyAccessExpression(expression)) { - // Handle tokens.xyz or other property access - return { - value: expression.getText(), - isLiteral: false, - node: expression, - }; - } else if (Node.isNoSubstitutionTemplateLiteral(expression)) { - return { - value: expression.getLiteralValue(), - isLiteral: true, - node: expression, - }; - } + // TODO IF NEEDED + // Handle right side of declaration if the assignment is from a known token package + // Handle template literals here if needed (we don't so far but may). + // We might also need to fully process the value but we'd do this by calling getAliasNode() on the value node and + // then getting the value declaration. 
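  // Sketch of the template-literal case mentioned above (not current behaviour): a declaration like
  //   export const focusRing = `1px solid ${tokens.colorStrokeFocus2}`;
  // would need its template spans walked so the embedded token reference is still reported.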
- // Default case for unhandled expression types return undefined; } From 402c8fee9f00ae2bfe6b9b67a33211486d637d48 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Mon, 5 May 2025 19:44:32 -0700 Subject: [PATCH 46/75] getting import/export values sorted --- packages/token-analyzer/src/importAnalyzer.ts | 108 +++++++++++++----- packages/token-analyzer/src/tokenResolver.ts | 4 +- 2 files changed, 80 insertions(+), 32 deletions(-) diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index d1408116c..4590a0fae 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -100,22 +100,32 @@ function processNamedImports( const exportInfo = findExportDeclaration(importedFile, importName, typeChecker); if (exportInfo) { - const { declaration, sourceFile: declarationFile, moduleSpecifier: exportModuleSpecifier } = exportInfo; + const { + declaration, + sourceFile: declarationFile, + moduleSpecifier: exportModuleSpecifier, + importExportSpecifier, + importExportSpecifierName, + } = exportInfo; // We need to first check if the import is coming from a known token package // Extract the value from the declaration + // TODO find the value of the token and then pass it into the values. This might be a string or template literal const valueInfo = extractValueFromDeclaration(declaration); - if (valueInfo) { + + if (isKnownTokenPackage(exportModuleSpecifier, importName)) { // We don't have a direct known token import, so process where the value is declared and determine if that's a // known token package or not. If not, we can omit the value. importedValues.set(alias, { - value: valueInfo.value, + // TODO we should set the value to the end token resolution, for now we will process to the import if this is an import + // which we need to get from findExportDeclaration and the processing within that function + value: importExportSpecifierName ?? 
importName, node: nameOrAliasNode, - knownTokenPackage: isKnownTokenPackage(exportModuleSpecifier, importName), + knownTokenPackage: true, }); - log(`Added imported value: ${alias} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); + log(`Added imported value: ${alias} = from ${exportModuleSpecifier}`); } } } @@ -202,25 +212,38 @@ function getModuleSpecifierFromExportSymbol(symbol: Symbol): { moduleSpecifier: string | undefined; sourceFile: SourceFile | undefined; declaration: Node | undefined; + specifier: Node | undefined; + specifierName: string | undefined; } { let moduleSpecifier: string | undefined; let sourceFile: SourceFile | undefined; let declaration: Node | undefined; - symbol.getDeclarations().forEach((declaration) => { - if (Node.isVariableDeclaration(declaration)) { - const varSymbol = declaration.getInitializer()?.getSymbol(); + let specifier: Node | undefined; + let specifierName: string | undefined; + symbol.getDeclarations().forEach((symbolDeclaration) => { + if (Node.isVariableDeclaration(symbolDeclaration)) { + const varSymbol = symbolDeclaration.getInitializer()?.getSymbol(); if (varSymbol) { const varImportSpecifier = varSymbol.getDeclarations().find((varDeclaration) => { return Node.isImportSpecifier(varDeclaration); }); const varImportSymbol = varImportSpecifier?.getSymbol(); + specifier = varImportSpecifier; + specifierName = varImportSpecifier?.getName(); if (varImportSymbol) { - return getModuleSpecifierFromExportSymbol(varImportSymbol); + const { + moduleSpecifier: newSpecifier, + sourceFile: newSourceFile, + declaration: newDeclaration, + } = getModuleSpecifierFromExportSymbol(varImportSymbol); + moduleSpecifier = newSpecifier; + sourceFile = newSourceFile; + declaration = newDeclaration; } } } else { // Walk the tree until we find an ExportDeclaration - let currentDeclaration: Node | undefined = declaration; + let currentDeclaration: Node | undefined = symbolDeclaration; while ( Node.isExportSpecifier(currentDeclaration) || Node.isNamedExports(currentDeclaration) || @@ -228,6 +251,10 @@ function getModuleSpecifierFromExportSymbol(symbol: Symbol): { Node.isNamedImports(currentDeclaration) || Node.isImportClause(currentDeclaration) ) { + if (Node.isExportSpecifier(currentDeclaration) || Node.isImportSpecifier(currentDeclaration)) { + specifier = currentDeclaration; + specifierName = currentDeclaration.getName(); + } currentDeclaration = currentDeclaration.getParent(); } @@ -238,7 +265,7 @@ function getModuleSpecifierFromExportSymbol(symbol: Symbol): { } } }); - return { moduleSpecifier, sourceFile, declaration }; + return { moduleSpecifier, sourceFile, declaration, specifier, specifierName }; } function isKnownTokenPackage(moduleSpecifier: string, valueName?: string): boolean { @@ -259,32 +286,43 @@ function findNearestKnownTokenInfo( typeChecker: TypeChecker ): | { - knownTokenSymbol: Symbol; knownTokenModuleSpecifier: string; knownTokenSourceFile?: SourceFile; knownTokenDeclaration?: Node; + knownTokenImportExportName?: string; + knownTokenImportExportSpecifier?: Node; } | undefined { // Get the module specifier if we're an export specifier - const { moduleSpecifier, sourceFile, declaration } = getModuleSpecifierFromExportSymbol(exportSymbol); - + const { moduleSpecifier, sourceFile, declaration, specifier, specifierName } = + getModuleSpecifierFromExportSymbol(exportSymbol); const isAlias = exportSymbol.isAlias(); if (moduleSpecifier) { - // If this is an alias (re-export), get the original symbol - let resolvedSymbol: Symbol = exportSymbol; - if 
(isAlias) { - // we're ok type casting here because we know the symbol is an alias from the previous check but TS won't pick up on it - resolvedSymbol = typeChecker.getImmediatelyAliasedSymbol(exportSymbol) as Symbol; - // console.log(`Resolved alias to: ${resolvedSymbol.getName()}`, moduleSpecifier); - if (isKnownTokenPackage(moduleSpecifier, resolvedSymbol.getName())) { - return { - knownTokenSymbol: resolvedSymbol, - knownTokenModuleSpecifier: moduleSpecifier, - knownTokenSourceFile: sourceFile, - knownTokenDeclaration: declaration ?? resolvedSymbol.getValueDeclaration(), - }; - } else { - return findNearestKnownTokenInfo(resolvedSymbol, typeChecker); + if (isKnownTokenPackage(moduleSpecifier, exportSymbol.getName())) { + return { + knownTokenModuleSpecifier: moduleSpecifier, + knownTokenSourceFile: sourceFile, + knownTokenDeclaration: declaration ?? exportSymbol.getValueDeclaration(), + knownTokenImportExportName: specifierName, + knownTokenImportExportSpecifier: specifier, + }; + } else { + // If this is an alias (re-export), get the original symbol + let resolvedSymbol: Symbol = exportSymbol; + if (isAlias) { + // we're ok type casting here because we know the symbol is an alias from the previous check but TS won't pick up on it + resolvedSymbol = typeChecker.getImmediatelyAliasedSymbol(exportSymbol) as Symbol; + if (isKnownTokenPackage(moduleSpecifier, resolvedSymbol.getName())) { + return { + knownTokenModuleSpecifier: moduleSpecifier, + knownTokenSourceFile: sourceFile, + knownTokenDeclaration: declaration ?? resolvedSymbol.getValueDeclaration(), + knownTokenImportExportName: specifierName, + knownTokenImportExportSpecifier: specifier, + }; + } else { + return findNearestKnownTokenInfo(resolvedSymbol, typeChecker); + } } } } @@ -298,7 +336,15 @@ function findExportDeclaration( sourceFile: SourceFile, exportName: string, typeChecker: TypeChecker -): { declaration: Node; sourceFile: SourceFile; moduleSpecifier: string } | undefined { +): + | { + declaration: Node; + sourceFile: SourceFile; + moduleSpecifier: string; + importExportSpecifierName?: string; + importExportSpecifier?: Node; + } + | undefined { try { // Get the source file's symbol (represents the module) const sourceFileSymbol = typeChecker.getSymbolAtLocation(sourceFile); @@ -328,6 +374,8 @@ function findExportDeclaration( declaration: tokenInfo.knownTokenDeclaration, sourceFile: tokenInfo.knownTokenSourceFile, moduleSpecifier: tokenInfo.knownTokenModuleSpecifier, + importExportSpecifierName: tokenInfo.knownTokenImportExportName, + importExportSpecifier: tokenInfo.knownTokenImportExportSpecifier, }; } } catch (err) { diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index 052157f60..e28d355d5 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -78,7 +78,7 @@ const processStringLiteral = (info: TokenResolverInfo): TokenRefe }; const processIdentifier = (info: TokenResolverInfo): TokenReference[] => { - const { node, parentName, path, tokens, isVariableReference, sourceFile } = info; + const { node, parentName, path, tokens, isVariableReference, sourceFile, importedValues } = info; let returnTokens = tokens.slice(); @@ -91,7 +91,7 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ returnTokens = addTokenToArray( { property: propertyName, - token: [text], + token: [importedValues.get(text)?.value ?? 
text], path, }, returnTokens, From 5aed065a2aff375ade05426673c5f37943050fc0 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 7 May 2025 01:02:42 -0700 Subject: [PATCH 47/75] ensure default re-exports work fix additional import abstraction update older tests with actual tokens --- .../src/__tests__/cssVarE2E.test.ts | 10 -- .../src/__tests__/reexportTracking.test.ts | 75 +++------ .../src/__tests__/test-files/analysis.json | 6 +- .../src/__tests__/typeCheckerImports.test.ts | 55 +++---- packages/token-analyzer/src/importAnalyzer.ts | 153 +++++++++++++----- packages/token-analyzer/src/tokenUtils.ts | 4 +- packages/token-analyzer/src/types.ts | 2 +- 7 files changed, 170 insertions(+), 135 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts index 755fa9396..be1227c54 100644 --- a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts +++ b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts @@ -123,7 +123,6 @@ describe('CSS Variable Token Extraction E2E', () => { expect.objectContaining({ property: 'color', token: ['tokens.colorBrandForeground6'], - isVariableReference: true, }) ); @@ -133,7 +132,6 @@ describe('CSS Variable Token Extraction E2E', () => { expect.objectContaining({ property: 'color', token: ['tokens.colorBrandForeground3'], - isVariableReference: true, }) ); @@ -152,7 +150,6 @@ describe('CSS Variable Token Extraction E2E', () => { expect.objectContaining({ property: 'color', token: ['tokens.colorNeutralForeground3'], - isVariableReference: true, }) ); @@ -162,11 +159,9 @@ describe('CSS Variable Token Extraction E2E', () => { expect.arrayContaining([ expect.objectContaining({ token: ['tokens.colorBrandBackground'], - isVariableReference: true, }), expect.objectContaining({ token: ['tokens.colorNeutralBackground1'], - isVariableReference: true, }), ]) ); @@ -281,30 +276,25 @@ describe('CSS Variable Cross-Module Resolution E2E', () => { expect.objectContaining({ property: 'color', token: ['tokens.colorBrandPrimary'], - isVariableReference: true, }), // Import of CSS var with token expect.objectContaining({ property: 'backgroundColor', token: ['tokens.colorBrandPrimary'], - isVariableReference: true, }), // Import of nested CSS var with token expect.objectContaining({ property: 'border', token: ['tokens.colorBrandSecondary'], - isVariableReference: true, }), // Multiple tokens from a complex var expect.objectContaining({ property: 'padding', token: ['tokens.colorBrandPrimary'], - isVariableReference: true, }), expect.objectContaining({ property: 'padding', token: ['tokens.colorBrandSecondary'], - isVariableReference: true, }), ]) ); diff --git a/packages/token-analyzer/src/__tests__/reexportTracking.test.ts b/packages/token-analyzer/src/__tests__/reexportTracking.test.ts index ce78a43a8..eae4df0ea 100644 --- a/packages/token-analyzer/src/__tests__/reexportTracking.test.ts +++ b/packages/token-analyzer/src/__tests__/reexportTracking.test.ts @@ -18,7 +18,7 @@ beforeAll(() => { path.join(TEST_DIR, 'main.ts'), ` import { Component, AliasedValue, Utils, DirectValue } from './index'; - import DefaultExport from './index'; + import DefaultExport from './defaults'; const styles = { component: Component, @@ -34,6 +34,8 @@ beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'index.ts'), ` + import { tokens } from '@fluentui/react-theme'; + // Re-export from components export { Component } from './components'; @@ -44,7 +46,7 @@ beforeAll(() => { export * from './utils'; // Direct export - 
export const DirectValue = 'tokens.direct.value'; + export const DirectValue = tokens.colorNeutralForeground1Hover; // Re-export default export { default } from './defaults'; @@ -55,7 +57,8 @@ beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'components.ts'), ` - export const Component = 'tokens.components.primary'; + import { anotherToken } from '@fluentui/semantic-tokens'; + export const Component = anotherToken; ` ); @@ -63,7 +66,8 @@ beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'values.ts'), ` - export const Value = 'tokens.values.standard'; + import { tokens } from '@fluentui/react-theme'; + export const Value = tokens.borderRadiusCircular; ` ); @@ -71,7 +75,8 @@ beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'utils.ts'), ` - export const Utils = 'tokens.utils.helper'; + import { tokens } from '@fluentui/react-theme'; + export const Utils = tokens.colorNeutralBackground1; ` ); @@ -79,8 +84,9 @@ beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'defaults.ts'), ` - const DefaultValue = 'tokens.defaults.main'; - export default DefaultValue; + import { tokens } from '@fluentui/react-theme'; + const DefaultValue = tokens.colorNeutralStroke1; + export default tokens.colorNeutralStroke1; ` ); }); @@ -120,52 +126,35 @@ describe('Re-export tracking', () => { const mainFile = path.join(TEST_DIR, 'main.ts'); const sourceFile = project.addSourceFileAtPath(mainFile); - const importedValues: Map = await analyzeImports( - sourceFile, - project - ); + const importedValues: Map = await analyzeImports(sourceFile, project); // Check that Component was correctly resolved from components.ts expect(importedValues.has('Component')).toBe(true); - expect(importedValues.get('Component')?.value).toBe( - 'tokens.components.primary' - ); - expect(importedValues.get('Component')?.sourceFile).toContain( - 'components.ts' - ); + expect(importedValues.get('Component')?.value).toBe('anotherToken'); + expect(importedValues.get('Component')?.sourceFile).toContain('components.ts'); }); test('follows aliased re-export chain', async () => { const mainFile = path.join(TEST_DIR, 'main.ts'); const sourceFile = project.addSourceFileAtPath(mainFile); - const importedValues: Map = await analyzeImports( - sourceFile, - project - ); + const importedValues: Map = await analyzeImports(sourceFile, project); // Check that AliasedValue was correctly resolved from values.ts expect(importedValues.has('AliasedValue')).toBe(true); - expect(importedValues.get('AliasedValue')?.value).toBe( - 'tokens.values.standard' - ); - expect(importedValues.get('AliasedValue')?.sourceFile).toContain( - 'values.ts' - ); + expect(importedValues.get('AliasedValue')?.value).toBe('tokens.borderRadiusCircular'); + expect(importedValues.get('AliasedValue')?.sourceFile).toContain('values.ts'); }); test('follows namespace re-export', async () => { const mainFile = path.join(TEST_DIR, 'main.ts'); const sourceFile = project.addSourceFileAtPath(mainFile); - const importedValues: Map = await analyzeImports( - sourceFile, - project - ); + const importedValues: Map = await analyzeImports(sourceFile, project); // Check that Utils from namespace export was correctly resolved expect(importedValues.has('Utils')).toBe(true); - expect(importedValues.get('Utils')?.value).toBe('tokens.utils.helper'); + expect(importedValues.get('Utils')?.value).toBe('tokens.colorNeutralBackground1'); expect(importedValues.get('Utils')?.sourceFile).toContain('utils.ts'); }); @@ -173,16 +162,11 @@ describe('Re-export tracking', () => { const mainFile = 
path.join(TEST_DIR, 'main.ts'); const sourceFile = project.addSourceFileAtPath(mainFile); - const importedValues: Map = await analyzeImports( - sourceFile, - project - ); + const importedValues: Map = await analyzeImports(sourceFile, project); // Check that DirectValue was correctly resolved from index.ts expect(importedValues.has('DirectValue')).toBe(true); - expect(importedValues.get('DirectValue')?.value).toBe( - 'tokens.direct.value' - ); + expect(importedValues.get('DirectValue')?.value).toBe('tokens.colorNeutralForeground1Hover'); expect(importedValues.get('DirectValue')?.sourceFile).toContain('index.ts'); }); @@ -190,18 +174,11 @@ describe('Re-export tracking', () => { const mainFile = path.join(TEST_DIR, 'main.ts'); const sourceFile = project.addSourceFileAtPath(mainFile); - const importedValues: Map = await analyzeImports( - sourceFile, - project - ); + const importedValues: Map = await analyzeImports(sourceFile, project); // Check that DefaultExport was correctly resolved from defaults.ts expect(importedValues.has('DefaultExport')).toBe(true); - expect(importedValues.get('DefaultExport')?.value).toBe( - 'tokens.defaults.main' - ); - expect(importedValues.get('DefaultExport')?.sourceFile).toContain( - 'defaults.ts' - ); + expect(importedValues.get('DefaultExport')?.value).toBe('tokens.colorNeutralStroke1'); + expect(importedValues.get('DefaultExport')?.sourceFile).toContain('defaults.ts'); }); }); diff --git a/packages/token-analyzer/src/__tests__/test-files/analysis.json b/packages/token-analyzer/src/__tests__/test-files/analysis.json index c18c87e16..aaba87ade 100644 --- a/packages/token-analyzer/src/__tests__/test-files/analysis.json +++ b/packages/token-analyzer/src/__tests__/test-files/analysis.json @@ -26,7 +26,7 @@ }, { "property": "fontFamily", - "token": ["tokens.fontFamilyBase"], + "token": ["someToken"], "path": ["fontFamily"] }, { @@ -70,12 +70,12 @@ "tokens": [ { "property": "backgroundColor", - "token": ["tokens.colorNeutralBackground1Hover"], + "token": ["colorNeutralForeground1"], "path": ["':hover'", "backgroundColor"] }, { "property": "borderColor", - "token": ["tokens.colorNeutralStroke1Hover"], + "token": ["anotherToken"], "path": ["':hover'", "borderColor"] }, { diff --git a/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts b/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts index a43cad4e8..151a60a1d 100644 --- a/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts +++ b/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts @@ -18,7 +18,7 @@ beforeAll(() => { path.join(TEST_DIR, 'main.ts'), ` import { Component, AliasedValue, Utils, DirectValue } from './index'; - import DefaultExport from './index'; + import DefaultExport from './defaults'; const styles = { component: Component, @@ -34,6 +34,8 @@ beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'index.ts'), ` + import { tokens } from '@fluentui/react-theme'; + // Re-export from components export { Component } from './components'; @@ -44,7 +46,7 @@ beforeAll(() => { export * from './utils'; // Direct export - export const DirectValue = 'tokens.direct.value'; + export const DirectValue = tokens.colorNeutralForeground1Hover; // Re-export default export { default } from './defaults'; @@ -55,7 +57,8 @@ beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'components.ts'), ` - export const Component = 'tokens.components.primary'; + import { anotherToken } from '@fluentui/semantic-tokens'; + export const Component = anotherToken; ` ); @@ -63,7 +66,8 @@ 
beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'values.ts'), ` - export const Value = 'tokens.values.standard'; + import { tokens } from '@fluentui/react-theme'; + export const Value = tokens.borderRadiusCircular; ` ); @@ -71,7 +75,8 @@ beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'utils.ts'), ` - export const Utils = 'tokens.utils.helper'; + import { tokens } from '@fluentui/react-theme'; + export const Utils = tokens.colorNeutralBackground1; ` ); @@ -79,8 +84,9 @@ beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'defaults.ts'), ` - const DefaultValue = 'tokens.defaults.main'; - export default DefaultValue; + import { tokens } from '@fluentui/react-theme'; + const DefaultValue = tokens.colorNeutralStroke1; + export default tokens.colorNeutralStroke1; ` ); }); @@ -123,48 +129,31 @@ describe('Type Checker Import Analysis', () => { // Add all other files to ensure project has complete type information project.addSourceFilesAtPaths([path.join(TEST_DIR, '**/*.ts')]); - const importedValues: Map = await analyzeImports( - sourceFile, - project - ); + const importedValues: Map = await analyzeImports(sourceFile, project); // Verify standard re-export (Component) expect(importedValues.has('Component')).toBe(true); - expect(importedValues.get('Component')?.value).toBe( - 'tokens.components.primary' - ); - expect(importedValues.get('Component')?.sourceFile).toContain( - 'components.ts' - ); + expect(importedValues.get('Component')?.value).toBe('anotherToken'); + expect(importedValues.get('Component')?.sourceFile).toContain('components.ts'); // Verify aliased re-export (AliasedValue) expect(importedValues.has('AliasedValue')).toBe(true); - expect(importedValues.get('AliasedValue')?.value).toBe( - 'tokens.values.standard' - ); - expect(importedValues.get('AliasedValue')?.sourceFile).toContain( - 'values.ts' - ); + expect(importedValues.get('AliasedValue')?.value).toBe('tokens.borderRadiusCircular'); + expect(importedValues.get('AliasedValue')?.sourceFile).toContain('values.ts'); // Verify namespace re-export (Utils) expect(importedValues.has('Utils')).toBe(true); - expect(importedValues.get('Utils')?.value).toBe('tokens.utils.helper'); + expect(importedValues.get('Utils')?.value).toBe('tokens.colorNeutralBackground1'); expect(importedValues.get('Utils')?.sourceFile).toContain('utils.ts'); // Verify direct export (DirectValue) expect(importedValues.has('DirectValue')).toBe(true); - expect(importedValues.get('DirectValue')?.value).toBe( - 'tokens.direct.value' - ); + expect(importedValues.get('DirectValue')?.value).toBe('tokens.colorNeutralForeground1Hover'); expect(importedValues.get('DirectValue')?.sourceFile).toContain('index.ts'); // Verify default export (DefaultExport) expect(importedValues.has('DefaultExport')).toBe(true); - expect(importedValues.get('DefaultExport')?.value).toBe( - 'tokens.defaults.main' - ); - expect(importedValues.get('DefaultExport')?.sourceFile).toContain( - 'defaults.ts' - ); + expect(importedValues.get('DefaultExport')?.value).toBe('tokens.colorNeutralStroke1'); + expect(importedValues.get('DefaultExport')?.sourceFile).toContain('defaults.ts'); }); }); diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index 4590a0fae..e30d977a9 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -3,12 +3,14 @@ import { Project, Node, SourceFile, ImportDeclaration, Symbol, TypeChecker } fro import { log } from './debugUtils.js'; import { 
knownTokenImportsAndModules } from './types.js'; import { getModuleSourceFile } from './moduleResolver.js'; +import { getInitializerFromIdentifier } from './tokenUtils'; /** * Represents a value imported from another module */ export interface ImportedValue { value: string; + sourceFile: string; node: Node; knownTokenPackage: boolean; } @@ -92,6 +94,7 @@ function processNamedImports( value: importName, node: nameOrAliasNode, // Use the alias node if available, otherwise use the declaration knownTokenPackage: true, + sourceFile: importedFile.getFilePath(), }); log(`Added known token import: ${alias} = ${importName} from ${importedFile.getFilePath()}`); @@ -101,28 +104,23 @@ function processNamedImports( if (exportInfo) { const { - declaration, sourceFile: declarationFile, moduleSpecifier: exportModuleSpecifier, - importExportSpecifier, importExportSpecifierName, + valueDeclarationValue, } = exportInfo; // We need to first check if the import is coming from a known token package - - // Extract the value from the declaration - // TODO find the value of the token and then pass it into the values. This might be a string or template literal - const valueInfo = extractValueFromDeclaration(declaration); - - if (isKnownTokenPackage(exportModuleSpecifier, importName)) { + if (isKnownTokenPackage(exportModuleSpecifier, importExportSpecifierName ?? importName)) { // We don't have a direct known token import, so process where the value is declared and determine if that's a // known token package or not. If not, we can omit the value. importedValues.set(alias, { // TODO we should set the value to the end token resolution, for now we will process to the import if this is an import // which we need to get from findExportDeclaration and the processing within that function - value: importExportSpecifierName ?? importName, + value: valueDeclarationValue ?? importName, node: nameOrAliasNode, knownTokenPackage: true, + sourceFile: declarationFile.getFilePath(), }); log(`Added imported value: ${alias} = from ${exportModuleSpecifier}`); @@ -150,30 +148,39 @@ function processDefaultImport( } const importName = defaultImport.getText(); - if (isKnownTokenPackage(moduleSpecifier)) { importedValues.set(importName, { value: importName, node: importDecl, knownTokenPackage: true, + sourceFile: importedFile.getFilePath(), }); } else { // Find the default export's true source const exportInfo = findExportDeclaration(importedFile, 'default', typeChecker); if (exportInfo) { - const { declaration, sourceFile: declarationFile } = exportInfo; - - // Extract the value from the declaration - const valueInfo = extractValueFromDeclaration(declaration); - if (valueInfo) { + const { + sourceFile: declarationFile, + moduleSpecifier: exportModuleSpecifier, + importExportSpecifierName, + valueDeclarationValue, + } = exportInfo; + + if (isKnownTokenPackage(exportModuleSpecifier, importExportSpecifierName ?? importName)) { importedValues.set(importName, { - value: valueInfo.value, - node: declaration, - knownTokenPackage: false, + // TODO we should set the value to the end token resolution, for now we will process to the import if this is an import + // which we need to get from findExportDeclaration and the processing within that function + value: valueDeclarationValue ?? 
importName, + node: defaultImport, + knownTokenPackage: true, + sourceFile: declarationFile.getFilePath(), }); - - log(`Added default import: ${importName} = ${valueInfo.value} from ${declarationFile.getFilePath()}`); + log( + `Added default import: ${importName} = ${ + valueDeclarationValue ?? importName + } from ${declarationFile.getFilePath()}` + ); } } } @@ -204,6 +211,7 @@ function processNamespaceImport( value: importName, node: namespaceImport, knownTokenPackage: true, + sourceFile: importedFile.getFilePath(), }); } } @@ -222,7 +230,12 @@ function getModuleSpecifierFromExportSymbol(symbol: Symbol): { let specifierName: string | undefined; symbol.getDeclarations().forEach((symbolDeclaration) => { if (Node.isVariableDeclaration(symbolDeclaration)) { - const varSymbol = symbolDeclaration.getInitializer()?.getSymbol(); + let symbolInitializer = symbolDeclaration.getInitializer(); + if (Node.isPropertyAccessExpression(symbolInitializer)) { + symbolInitializer = symbolInitializer.getExpression(); + } + + const varSymbol = symbolInitializer?.getSymbol(); if (varSymbol) { const varImportSpecifier = varSymbol.getDeclarations().find((varDeclaration) => { return Node.isImportSpecifier(varDeclaration); @@ -241,6 +254,51 @@ function getModuleSpecifierFromExportSymbol(symbol: Symbol): { declaration = newDeclaration; } } + } else if (Node.isExportAssignment(symbolDeclaration)) { + // we have a default export and need to break down the expression to find the value + const symbolExpression = symbolDeclaration.getExpression(); + if (Node.isIdentifier(symbolExpression)) { + const symbolInitializer = getInitializerFromIdentifier(symbolExpression); + if (Node.isPropertyAccessExpression(symbolInitializer)) { + const accessExpressionSymbol = symbolInitializer.getExpression().getSymbol(); + const varImportSpecifier = accessExpressionSymbol?.getDeclarations().find((varDeclaration) => { + return Node.isImportSpecifier(varDeclaration); + }); + + specifier = varImportSpecifier; + specifierName = varImportSpecifier?.getName(); + if (accessExpressionSymbol) { + const { + moduleSpecifier: newSpecifier, + sourceFile: newSourceFile, + declaration: newDeclaration, + } = getModuleSpecifierFromExportSymbol(accessExpressionSymbol); + moduleSpecifier = newSpecifier; + sourceFile = newSourceFile; + declaration = newDeclaration; + } + } + } else if (Node.isPropertyAccessExpression(symbolExpression)) { + // Get the property access expression's expression (the token part of token.someValue) + // From here we can extract the symbol and recurse. 
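        // Illustrative shape (matches the defaults.ts test fixture): `export default tokens.colorNeutralStroke1;`
        // `symbolExpression` is `tokens.colorNeutralStroke1`, its expression is the `tokens` identifier,
        // and that identifier's import specifier points back at the token package we want to classify.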
+ const accessExpressionSymbol = symbolExpression.getExpression().getSymbol(); + const varImportSpecifier = accessExpressionSymbol?.getDeclarations().find((varDeclaration) => { + return Node.isImportSpecifier(varDeclaration); + }); + + specifier = varImportSpecifier; + specifierName = varImportSpecifier?.getName(); + if (accessExpressionSymbol) { + const { + moduleSpecifier: newSpecifier, + sourceFile: newSourceFile, + declaration: newDeclaration, + } = getModuleSpecifierFromExportSymbol(accessExpressionSymbol); + moduleSpecifier = newSpecifier; + sourceFile = newSourceFile; + declaration = newDeclaration; + } + } } else { // Walk the tree until we find an ExportDeclaration let currentDeclaration: Node | undefined = symbolDeclaration; @@ -291,38 +349,54 @@ function findNearestKnownTokenInfo( knownTokenDeclaration?: Node; knownTokenImportExportName?: string; knownTokenImportExportSpecifier?: Node; + knownTokenValueDeclarationValue?: string; } | undefined { // Get the module specifier if we're an export specifier const { moduleSpecifier, sourceFile, declaration, specifier, specifierName } = getModuleSpecifierFromExportSymbol(exportSymbol); + const isAlias = exportSymbol.isAlias(); if (moduleSpecifier) { - if (isKnownTokenPackage(moduleSpecifier, exportSymbol.getName())) { + if (isKnownTokenPackage(moduleSpecifier, specifierName)) { + let tokenValueDeclaration = exportSymbol.getValueDeclaration(); + if (Node.isVariableDeclaration(tokenValueDeclaration)) { + tokenValueDeclaration = tokenValueDeclaration.getInitializer(); + } + exportSymbol.getDeclarations().forEach((declaration) => { + if (Node.isExportAssignment(declaration)) { + tokenValueDeclaration = declaration.getExpression(); + } + }); return { knownTokenModuleSpecifier: moduleSpecifier, knownTokenSourceFile: sourceFile, knownTokenDeclaration: declaration ?? exportSymbol.getValueDeclaration(), knownTokenImportExportName: specifierName, knownTokenImportExportSpecifier: specifier, + knownTokenValueDeclarationValue: tokenValueDeclaration?.getText(), }; - } else { - // If this is an alias (re-export), get the original symbol + } + // If this is an alias (re-export), get the original symbol + else if (isAlias) { let resolvedSymbol: Symbol = exportSymbol; - if (isAlias) { - // we're ok type casting here because we know the symbol is an alias from the previous check but TS won't pick up on it - resolvedSymbol = typeChecker.getImmediatelyAliasedSymbol(exportSymbol) as Symbol; - if (isKnownTokenPackage(moduleSpecifier, resolvedSymbol.getName())) { - return { - knownTokenModuleSpecifier: moduleSpecifier, - knownTokenSourceFile: sourceFile, - knownTokenDeclaration: declaration ?? resolvedSymbol.getValueDeclaration(), - knownTokenImportExportName: specifierName, - knownTokenImportExportSpecifier: specifier, - }; - } else { - return findNearestKnownTokenInfo(resolvedSymbol, typeChecker); + // we're ok type casting here because we know the symbol is an alias from the previous check but TS won't pick up on it + resolvedSymbol = typeChecker.getImmediatelyAliasedSymbol(exportSymbol) as Symbol; + if (isKnownTokenPackage(moduleSpecifier, resolvedSymbol.getName())) { + let tokenValueDeclaration = resolvedSymbol.getValueDeclaration(); + if (Node.isVariableDeclaration(tokenValueDeclaration)) { + tokenValueDeclaration = tokenValueDeclaration.getInitializer(); } + return { + knownTokenModuleSpecifier: moduleSpecifier, + knownTokenSourceFile: sourceFile, + knownTokenDeclaration: declaration ?? 
resolvedSymbol.getValueDeclaration(), + knownTokenImportExportName: specifierName, + knownTokenImportExportSpecifier: specifier, + knownTokenValueDeclarationValue: tokenValueDeclaration?.getText(), + }; + } else { + return findNearestKnownTokenInfo(resolvedSymbol, typeChecker); } } } @@ -341,6 +415,7 @@ function findExportDeclaration( declaration: Node; sourceFile: SourceFile; moduleSpecifier: string; + valueDeclarationValue?: string; importExportSpecifierName?: string; importExportSpecifier?: Node; } @@ -376,6 +451,7 @@ function findExportDeclaration( moduleSpecifier: tokenInfo.knownTokenModuleSpecifier, importExportSpecifierName: tokenInfo.knownTokenImportExportName, importExportSpecifier: tokenInfo.knownTokenImportExportSpecifier, + valueDeclarationValue: tokenInfo.knownTokenValueDeclarationValue, }; } } catch (err) { @@ -388,6 +464,9 @@ function findExportDeclaration( * Extract string value from a declaration node */ function extractValueFromDeclaration(declaration: Node): { value: string } | undefined { + // Extract the value from the declaration + // TODO find the value of the token and then pass it into the values. This might be a string or template literal and + // we can use this later but it isn't needed for token identification, more for value processing down the line // Handle variable declarations if (Node.isVariableDeclaration(declaration)) { return { value: declaration.getNameNode().getText() }; diff --git a/packages/token-analyzer/src/tokenUtils.ts b/packages/token-analyzer/src/tokenUtils.ts index 30b83c086..2593c1d4c 100644 --- a/packages/token-analyzer/src/tokenUtils.ts +++ b/packages/token-analyzer/src/tokenUtils.ts @@ -1,5 +1,5 @@ // tokenUtils.ts -import { Symbol, SyntaxKind, Node } from 'ts-morph'; +import { Symbol, SyntaxKind, Node, Expression } from 'ts-morph'; import { TOKEN_REGEX, TokenReference, TokenResolverInfo } from './types.js'; import { shorthands } from '@griffel/react'; @@ -61,7 +61,7 @@ export function isTokenReferenceOld(textOrNode: string | Node | Symbol): boolean return test; } -export function getInitializerFromIdentifier(node: Node): Node | undefined { +export function getInitializerFromIdentifier(node: Node): Expression | undefined { const nodeSymbol = node.getSymbol(); const nodeDeclarations = nodeSymbol?.getDeclarations(); if (nodeSymbol && nodeDeclarations && nodeDeclarations.length > 0) { diff --git a/packages/token-analyzer/src/types.ts b/packages/token-analyzer/src/types.ts index e9946309c..43dbe59f1 100644 --- a/packages/token-analyzer/src/types.ts +++ b/packages/token-analyzer/src/types.ts @@ -66,7 +66,7 @@ export type KnownTokenImportsAndModules = { export const knownTokenImportsAndModules: KnownTokenImportsAndModules = { // if we see any imports from the defaults, we assume it's a token. 
// @fluentui/tokens is here as a test but should be removed in the future - default: ['@fluentui/semantic-tokens', '@fluentui/tokens'], + default: ['@fluentui/semantic-tokens'], // begin the known token imports tokens: ['@fluentui/react-theme', '@fluentui/react-components', '@fluentui/tokens'], }; From 6859506077867b8ca9c16f5c7d5ad67ca15973af Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 8 May 2025 12:21:05 -0700 Subject: [PATCH 48/75] Updating to use real semantic-tokens package update test code --- package.json | 1 + packages/token-analyzer/src/__tests__/e2e.test.ts | 8 ++++---- .../src/__tests__/reexportTracking.test.ts | 6 +++--- .../src/__tests__/test-files/analysis.json | 8 ++++---- .../src/__tests__/test-files/import-test.ts | 6 +++--- .../src/__tests__/test-files/more-import-test.ts | 2 +- .../src/__tests__/test-files/useButtonStyles.styles.ts | 10 +++++----- .../src/__tests__/typeCheckerImports.test.ts | 6 +++--- yarn.lock | 7 +++++++ 9 files changed, 31 insertions(+), 23 deletions(-) diff --git a/package.json b/package.json index c909d0920..e195f7db8 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,7 @@ "@fluentui/react-migration-v8-v9": "^9.6.23", "@fluentui/react-shared-contexts": "^9.7.2", "@fluentui/scheme-utilities": "^8.3.58", + "@fluentui/semantic-tokens": "0.0.0-nightly-20250501-1704.1", "@griffel/react": "^1.5.22", "@griffel/shadow-dom": "~0.2.0", "@nx/devkit": "20.8.1", diff --git a/packages/token-analyzer/src/__tests__/e2e.test.ts b/packages/token-analyzer/src/__tests__/e2e.test.ts index 6fd93ffa6..857a4bf0e 100644 --- a/packages/token-analyzer/src/__tests__/e2e.test.ts +++ b/packages/token-analyzer/src/__tests__/e2e.test.ts @@ -77,8 +77,8 @@ describe('e2e test', () => { checkTokens( () => styles.useRootBaseClassName.resetStyles.nested["':hover'"].tokens, [ - ['backgroundColor', 'colorNeutralForeground1'], - ['borderColor', 'anotherToken'], + ['backgroundColor', 'cornerCtrlLgHoverRaw'], + ['borderColor', 'ctrlLinkForegroundBrandHover'], ['color', 'tokens.colorNeutralForeground1Hover'], ] ); @@ -98,10 +98,10 @@ describe('e2e test', () => { () => styles.useRootBaseClassName.resetStyles.tokens, [ ['backgroundColor', 'tokens.colorNeutralBackground1'], - ['color', 'semanticTokens.colorNeutralForeground1'], + ['color', 'semanticTokens.cornerFlyoutRest'], ['border', 'tokens.strokeWidthThin'], ['border', 'tokens.colorNeutralStroke1'], - ['fontFamily', 'someToken'], + ['fontFamily', 'textStyleAiHeaderFontfamily'], ['padding', 'tokens.spacingHorizontalM'], ['borderRadius', 'tokens.borderRadiusMedium'], ['fontSize', 'tokens.fontSizeBase300'], diff --git a/packages/token-analyzer/src/__tests__/reexportTracking.test.ts b/packages/token-analyzer/src/__tests__/reexportTracking.test.ts index eae4df0ea..1d01fb616 100644 --- a/packages/token-analyzer/src/__tests__/reexportTracking.test.ts +++ b/packages/token-analyzer/src/__tests__/reexportTracking.test.ts @@ -57,8 +57,8 @@ beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'components.ts'), ` - import { anotherToken } from '@fluentui/semantic-tokens'; - export const Component = anotherToken; + import { ctrlLinkForegroundBrandHover } from '@fluentui/semantic-tokens'; + export const Component = ctrlLinkForegroundBrandHover; ` ); @@ -130,7 +130,7 @@ describe('Re-export tracking', () => { // Check that Component was correctly resolved from components.ts expect(importedValues.has('Component')).toBe(true); - expect(importedValues.get('Component')?.value).toBe('anotherToken'); + 
expect(importedValues.get('Component')?.value).toBe('ctrlLinkForegroundBrandHover'); expect(importedValues.get('Component')?.sourceFile).toContain('components.ts'); }); diff --git a/packages/token-analyzer/src/__tests__/test-files/analysis.json b/packages/token-analyzer/src/__tests__/test-files/analysis.json index aaba87ade..c7b9595eb 100644 --- a/packages/token-analyzer/src/__tests__/test-files/analysis.json +++ b/packages/token-analyzer/src/__tests__/test-files/analysis.json @@ -11,7 +11,7 @@ }, { "property": "color", - "token": ["semanticTokens.colorNeutralForeground1"], + "token": ["semanticTokens.cornerFlyoutRest"], "path": ["color"] }, { @@ -26,7 +26,7 @@ }, { "property": "fontFamily", - "token": ["someToken"], + "token": ["textStyleAiHeaderFontfamily"], "path": ["fontFamily"] }, { @@ -70,12 +70,12 @@ "tokens": [ { "property": "backgroundColor", - "token": ["colorNeutralForeground1"], + "token": ["cornerCtrlLgHoverRaw"], "path": ["':hover'", "backgroundColor"] }, { "property": "borderColor", - "token": ["anotherToken"], + "token": ["ctrlLinkForegroundBrandHover"], "path": ["':hover'", "borderColor"] }, { diff --git a/packages/token-analyzer/src/__tests__/test-files/import-test.ts b/packages/token-analyzer/src/__tests__/test-files/import-test.ts index f02ac3e48..9f1ae5038 100644 --- a/packages/token-analyzer/src/__tests__/test-files/import-test.ts +++ b/packages/token-analyzer/src/__tests__/test-files/import-test.ts @@ -1,4 +1,4 @@ -import { anotherToken } from '@fluentui/semantic-tokens'; -export { colorNeutralForeground1 } from './more-import-test'; +import { ctrlLinkForegroundBrandHover } from '@fluentui/semantic-tokens'; +export { cornerCtrlLgHoverRaw } from './more-import-test'; -export const importTest = anotherToken; +export const importTest = ctrlLinkForegroundBrandHover; diff --git a/packages/token-analyzer/src/__tests__/test-files/more-import-test.ts b/packages/token-analyzer/src/__tests__/test-files/more-import-test.ts index ae77eee44..d20b3786a 100644 --- a/packages/token-analyzer/src/__tests__/test-files/more-import-test.ts +++ b/packages/token-analyzer/src/__tests__/test-files/more-import-test.ts @@ -1,2 +1,2 @@ // test direct export from the semantic tokens package -export { colorNeutralForeground1 } from '@fluentui/semantic-tokens'; +export { cornerCtrlLgHoverRaw } from '@fluentui/semantic-tokens'; diff --git a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts index c011994d8..d334a0bc5 100644 --- a/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts +++ b/packages/token-analyzer/src/__tests__/test-files/useButtonStyles.styles.ts @@ -5,8 +5,8 @@ import { shorthands, makeStyles, makeResetStyles, mergeClasses } from '@griffel/ import type { SlotClassNames } from '@fluentui/react-utilities'; import type { ButtonSlots, ButtonState } from '@fluentui/react-components'; import * as semanticTokens from '@fluentui/semantic-tokens'; -import { someToken } from '@fluentui/semantic-tokens'; -import { importTest, colorNeutralForeground1 } from './import-test'; +import { textStyleAiHeaderFontfamily } from '@fluentui/semantic-tokens'; +import { importTest, cornerCtrlLgHoverRaw } from './import-test'; export const buttonClassNames: SlotClassNames = { root: 'fui-Button', @@ -42,14 +42,14 @@ const useRootBaseClassName = makeResetStyles({ overflow: 'hidden', backgroundColor: tokenInInitializer2, - color: semanticTokens.colorNeutralForeground1, + color: 
semanticTokens.cornerFlyoutRest, border: `${tokens.strokeWidthThin} solid ${tokens.colorNeutralStroke1}`, - fontFamily: someToken, + fontFamily: textStyleAiHeaderFontfamily, outlineStyle: 'none', ':hover': { - backgroundColor: colorNeutralForeground1, + backgroundColor: cornerCtrlLgHoverRaw, borderColor: importTest, color: tokens.colorNeutralForeground1Hover, diff --git a/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts b/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts index 151a60a1d..75c83de2d 100644 --- a/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts +++ b/packages/token-analyzer/src/__tests__/typeCheckerImports.test.ts @@ -57,8 +57,8 @@ beforeAll(() => { fs.writeFileSync( path.join(TEST_DIR, 'components.ts'), ` - import { anotherToken } from '@fluentui/semantic-tokens'; - export const Component = anotherToken; + import { ctrlLinkForegroundBrandHover } from '@fluentui/semantic-tokens'; + export const Component = ctrlLinkForegroundBrandHover; ` ); @@ -133,7 +133,7 @@ describe('Type Checker Import Analysis', () => { // Verify standard re-export (Component) expect(importedValues.has('Component')).toBe(true); - expect(importedValues.get('Component')?.value).toBe('anotherToken'); + expect(importedValues.get('Component')?.value).toBe('ctrlLinkForegroundBrandHover'); expect(importedValues.get('Component')?.sourceFile).toContain('components.ts'); // Verify aliased re-export (AliasedValue) diff --git a/yarn.lock b/yarn.lock index 3cde3ca52..70127f0d0 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2717,6 +2717,13 @@ "@fluentui/theme" "^2.6.58" tslib "^2.1.0" +"@fluentui/semantic-tokens@0.0.0-nightly-20250501-1704.1": + version "0.0.0-nightly-20250501-1704.1" + resolved "https://registry.yarnpkg.com/@fluentui/semantic-tokens/-/semantic-tokens-0.0.0-nightly-20250501-1704.1.tgz#8d96d8327153bd3218dd19efa20e7214211361c5" + integrity sha512-ILvDAU4ESViISImGLkET4FKBJWSRjiR9Q1Vn2vVE7b/hrvNfMslJBncQeiZTFF15VrB1XGEFP6tFwV8WcoFftg== + dependencies: + "@swc/helpers" "^0.5.1" + "@fluentui/set-version@^8.2.23": version "8.2.23" resolved "https://registry.yarnpkg.com/@fluentui/set-version/-/set-version-8.2.23.tgz#14032bc9a222a6e50a5cb166e1a39a527cfc69fd" From 151bb67e3ddd6398b31becb145497cd0202be362 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 8 May 2025 17:29:41 -0700 Subject: [PATCH 49/75] Adding template literal processing function --- .../processTemplateStringLiteral.test.ts | 139 ++++++++++++++++++ .../src/__tests__/test-templates.ts | 31 ++++ .../src/processTemplateStringLiteral.ts | 107 ++++++++++++++ 3 files changed, 277 insertions(+) create mode 100644 packages/token-analyzer/src/__tests__/processTemplateStringLiteral.test.ts create mode 100644 packages/token-analyzer/src/__tests__/test-templates.ts create mode 100644 packages/token-analyzer/src/processTemplateStringLiteral.ts diff --git a/packages/token-analyzer/src/__tests__/processTemplateStringLiteral.test.ts b/packages/token-analyzer/src/__tests__/processTemplateStringLiteral.test.ts new file mode 100644 index 000000000..555fd80b2 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/processTemplateStringLiteral.test.ts @@ -0,0 +1,139 @@ +// extractNodesFromTemplateStringLiteral.test.ts +import { Project, TemplateExpression } from 'ts-morph'; +import path from 'path'; +import { extractNodesFromTemplateStringLiteral } from '../processTemplateStringLiteral.js'; + +describe('extractNodesFromTemplateStringLiteral', () => { + // Set up the ts-morph project and load our test file + const 
project = new Project(); + const testFilePath = path.resolve(__dirname, './test-templates.ts'); + const sourceFile = project.addSourceFileAtPath(testFilePath); + + // Helper function to find a template expression by its variable name + const findTemplateByName = (name: string): TemplateExpression => { + const variableDeclarations = sourceFile.getVariableDeclarations().filter((vd) => vd.getName() === name); + + if (variableDeclarations.length !== 1) { + throw new Error(`Expected to find exactly one variable declaration with name ${name}`); + } + + const initializer = variableDeclarations[0].getInitializer(); + if (!initializer || !initializer.getKind() || !initializer.getKindName().includes('Template')) { + throw new Error(`Variable ${name} is not initialized to a template expression`); + } + + return initializer as TemplateExpression; + }; + + test('Test Case 1: Basic example with nested var() functions', () => { + const template = findTemplateByName('template1'); + const result = extractNodesFromTemplateStringLiteral(template); + + expect(result.extractedExpressions.length).toBe(2); + expect(result.extractedExpressions[0].length).toBe(2); + expect(result.extractedExpressions[1].length).toBe(2); + + // Verify the nodes are the correct ones by checking their text + expect(result.extractedExpressions[0][0].getText()).toBe('someNode'); + expect(result.extractedExpressions[0][1].getText()).toBe('anotherNode'); + expect(result.extractedExpressions[1][0].getText()).toBe('moreNodes'); + expect(result.extractedExpressions[1][1].getText()).toBe('evenMoreNodes'); + }); + + test('Test Case 2: Mixed content with different nesting patterns', () => { + const template = findTemplateByName('template2'); + const result = extractNodesFromTemplateStringLiteral(template); + + expect(result.extractedExpressions.length).toBe(2); + expect(result.extractedExpressions[0].length).toBe(1); + expect(result.extractedExpressions[1].length).toBe(3); + + expect(result.extractedExpressions[0][0].getText()).toBe('someNode'); + expect(result.extractedExpressions[1][0].getText()).toBe('moreNodes'); + expect(result.extractedExpressions[1][1].getText()).toBe('anotherNode'); + expect(result.extractedExpressions[1][2].getText()).toBe('evenMoreNodes'); + }); + + test('Test Case 3: No var functions - each expression gets its own group', () => { + const template = findTemplateByName('template3'); + const result = extractNodesFromTemplateStringLiteral(template); + + // Should extract both expressions, each in its own group + expect(result.extractedExpressions.length).toBe(2); + expect(result.extractedExpressions[0].length).toBe(1); + expect(result.extractedExpressions[1].length).toBe(1); + + expect(result.extractedExpressions[0][0].getText()).toBe('someNode'); + expect(result.extractedExpressions[1][0].getText()).toBe('anotherNode'); + }); + + test('Test Case 4: Simple case with one var function', () => { + const template = findTemplateByName('template4'); + const result = extractNodesFromTemplateStringLiteral(template); + + expect(result.extractedExpressions.length).toBe(1); + expect(result.extractedExpressions[0].length).toBe(1); + expect(result.extractedExpressions[0][0].getText()).toBe('someNode'); + }); + + test('Test Case 5: Deeply nested var() functions', () => { + const template = findTemplateByName('template5'); + const result = extractNodesFromTemplateStringLiteral(template); + + expect(result.extractedExpressions.length).toBe(1); + expect(result.extractedExpressions[0].length).toBe(3); + 
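    // Reading of the expectation: one var() chain nests all three placeholders, e.g. something like
    // `var(${someNode}, var(${anotherNode}, var(${moreNodes})))` (the exact literal lives in
    // test-templates.ts), so all three expressions share a single extraction group.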
expect(result.extractedExpressions[0][0].getText()).toBe('someNode'); + expect(result.extractedExpressions[0][1].getText()).toBe('anotherNode'); + expect(result.extractedExpressions[0][2].getText()).toBe('moreNodes'); + }); + + test('Test Case 6: Multiple var() functions at the same level', () => { + const template = findTemplateByName('template6'); + const result = extractNodesFromTemplateStringLiteral(template); + + expect(result.extractedExpressions.length).toBe(4); + expect(result.extractedExpressions[0].length).toBe(1); + expect(result.extractedExpressions[1].length).toBe(1); + expect(result.extractedExpressions[2].length).toBe(1); + expect(result.extractedExpressions[3].length).toBe(1); + + expect(result.extractedExpressions[0][0].getText()).toBe('someNode'); + expect(result.extractedExpressions[1][0].getText()).toBe('anotherNode'); + expect(result.extractedExpressions[2][0].getText()).toBe('moreNodes'); + expect(result.extractedExpressions[3][0].getText()).toBe('evenMoreNodes'); + }); + + test('Test Case 7: Missing closing parentheses (edge case)', () => { + const template = findTemplateByName('template7'); + const result = extractNodesFromTemplateStringLiteral(template); + + // With missing closing parenthesis, all nodes stay in the same group + expect(result.extractedExpressions.length).toBe(1); + expect(result.extractedExpressions[0].length).toBe(3); + + expect(result.extractedExpressions[0][0].getText()).toBe('someNode'); + expect(result.extractedExpressions[0][1].getText()).toBe('anotherNode'); + expect(result.extractedExpressions[0][2].getText()).toBe('moreNodes'); + }); + + test('Test Case 8: Empty var() functions', () => { + const template = findTemplateByName('template8'); + const result = extractNodesFromTemplateStringLiteral(template); + + expect(result.extractedExpressions.length).toBe(1); + expect(result.extractedExpressions[0].length).toBe(1); + expect(result.extractedExpressions[0][0].getText()).toBe('someNode'); + }); + + test('Test Case 9: Mix of CSS properties and var() functions', () => { + const template = findTemplateByName('template9'); + const result = extractNodesFromTemplateStringLiteral(template); + + expect(result.extractedExpressions.length).toBe(2); + expect(result.extractedExpressions[0].length).toBe(1); + expect(result.extractedExpressions[1].length).toBe(1); + + expect(result.extractedExpressions[0][0].getText()).toBe('someNode'); + expect(result.extractedExpressions[1][0].getText()).toBe('anotherNode'); + }); +}); diff --git a/packages/token-analyzer/src/__tests__/test-templates.ts b/packages/token-analyzer/src/__tests__/test-templates.ts new file mode 100644 index 000000000..483d8a711 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/test-templates.ts @@ -0,0 +1,31 @@ +export const someNode = 'some value'; +export const anotherNode = 'another value'; +export const moreNodes = 'more values'; +export const evenMoreNodes = 'even more values'; + +// Test case 1: Basic example with nested var() functions +export const template1 = `var(--a, var(${someNode}, var(${anotherNode}))) var(${moreNodes}, var(${evenMoreNodes}))`; + +// Test case 2: Mixed content with different nesting patterns +export const template2 = `var(--x) no-extraction var(--y, ${someNode}) var(${moreNodes}, var(--z, ${anotherNode}, ${evenMoreNodes}))`; + +// Test case 3: No var functions +export const template3 = `no var functions here just ${someNode} and ${anotherNode}`; + +// Test case 4: Simple case with one var function +export const template4 = `var(${someNode}) simple case`; 
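// Editorial note (not part of this patch): per the expectations in
// processTemplateStringLiteral.test.ts above, extractNodesFromTemplateStringLiteral only groups
// the expressions that sit inside var() calls, so for the templates in this file it should yield:
//   template1 -> [[someNode, anotherNode], [moreNodes, evenMoreNodes]]
//   template3 -> [[someNode], [anotherNode]]  (no var(), so each expression becomes its own group)
//   template4 -> [[someNode]]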
+ +// Test case 5: Deeply nested var() functions +export const template5 = `var(--deep, var(--deeper, var(--deepest, ${someNode}, ${anotherNode}, var(${moreNodes}))))`; + +// Test case 6: Multiple var() functions at the same level +export const template6 = `var(${someNode}) var(${anotherNode}) var(${moreNodes}) var(${evenMoreNodes})`; + +// Test case 7: Missing closing parentheses (edge case) +export const template7 = `var(--broken, var(${someNode}, var(${anotherNode})) var(${moreNodes})`; + +// Test case 8: Empty var() functions +export const template8 = `var(--empty) var(--also-empty, ${someNode})`; + +// Test case 9: Mix of CSS properties and var() functions +export const template9 = `color: red; background: var(--bg, ${someNode}); padding: 10px; border: var(--border, ${anotherNode})`; diff --git a/packages/token-analyzer/src/processTemplateStringLiteral.ts b/packages/token-analyzer/src/processTemplateStringLiteral.ts new file mode 100644 index 000000000..0770d4b81 --- /dev/null +++ b/packages/token-analyzer/src/processTemplateStringLiteral.ts @@ -0,0 +1,107 @@ +import { TemplateExpression, Node } from 'ts-morph'; + +/** + * Function that processes template string literals that might contain references to tokens. + * It extracts potential token references for further processing. Since we want to use the same logic + */ +export const processTemplateStringLiteral = (): {} => {}; + +interface ExtractedNodessFromTemplateStringLiteral { + /** + * The original template expression that we're processing. + */ + originalExpression: TemplateExpression; + /** + * 3D array of nodes that are within var() functions. Each group of nodes is stored in a separate array. + */ + extractedExpressions: Node[][]; +} + +/** + * pulls nodes out of a template string literal in the order they appear in if they're within var() functions. + * If there are multiple non-nested var() functions, we place them in a 3d array at the top level. So grouped Nodes stay + * together and the order is maintained. 
+ * ex: + * for string `var(--a, var(${someNode}, var(${anotherNode}))) var(${moreNodes}, var(${evenMoreNodes}))` + * we would return: + * [ + * [someNode, anotherNode], + * [moreNodes, evenMoreNodes] + * ] + * @param expression + */ +export const extractNodesFromTemplateStringLiteral = ( + expression: TemplateExpression +): ExtractedNodessFromTemplateStringLiteral => { + const extractedExpressions: Node[][] = []; + const spans = expression.getTemplateSpans(); + + // Track the state as we process each part of the template + let currentGroup: Node[] = []; + let inVar = false; + let nestingLevel = 0; + + // Process the template head + const head = expression.getHead().getText(); + processText(head); + + // Process each span and its literal + spans.forEach((span) => { + // Process the expression + const expr = span.getExpression(); + if (inVar) { + // If inside var(), add to current group + currentGroup.push(expr); + } else { + // If not inside var(), create a standalone group for this expression + extractedExpressions.push([expr]); + } + + // Process the literal text after this expression + const literal = span.getLiteral().getText(); + processText(literal); + }); + + // Helper function to process text parts + function processText(text: string) { + for (let i = 0; i < text.length; i++) { + // Check for start of var() - no whitespace allowed + if (i + 3 < text.length && text.substring(i, i + 4) === 'var(' && (nestingLevel === 0 || inVar)) { + if (nestingLevel === 0) { + inVar = true; + // If we already have a group, add it to our results + if (currentGroup.length > 0) { + extractedExpressions.push([...currentGroup]); + currentGroup = []; + } + } + nestingLevel++; + i += 3; // Skip to the opening parenthesis + } + // Track parenthesis nesting + else if (text[i] === '(' && inVar) { + nestingLevel++; + } else if (text[i] === ')' && inVar) { + nestingLevel--; + if (nestingLevel === 0) { + inVar = false; + // If we've closed a var() and have a group, add it + if (currentGroup.length > 0) { + extractedExpressions.push([...currentGroup]); + currentGroup = []; + } + } + } + } + } + + // Handle any remaining nodes in the current group + if (currentGroup.length > 0) { + extractedExpressions.push(currentGroup); + } + + return { + originalExpression: expression, + extractedExpressions, + }; +}; From 13958dd98bb0020226a76656612d1bdeadd7d1f1 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 8 May 2025 18:09:16 -0700 Subject: [PATCH 50/75] process template expressions properly in resolver clean up old semantic-tokens mock update some tests delete cssVarTokenExtractor file --- .../src/__tests__/cssVarE2E.test.ts | 3 +- .../test-files/semantic-tokens/index.ts | 3 - .../semantic-tokens/semantic-tokens.ts | 2 - .../src/cssVarTokenExtractor.ts | 102 ------------------ .../src/processTemplateStringLiteral.ts | 6 -- packages/token-analyzer/src/tokenResolver.ts | 84 ++++++--------- 6 files changed, 35 insertions(+), 165 deletions(-) delete mode 100644 packages/token-analyzer/src/__tests__/test-files/semantic-tokens/index.ts delete mode 100644 packages/token-analyzer/src/__tests__/test-files/semantic-tokens/semantic-tokens.ts delete mode 100644 packages/token-analyzer/src/cssVarTokenExtractor.ts diff --git a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts index be1227c54..7ff77f4e8 100644 --- a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts +++ b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts @@ -10,6 +10,7 @@ const 
cssVarsStyleFile = ` import { makeStyles } from '@griffel/react'; import { tokens } from '@fluentui/react-theme'; import { colorPrimary, colorSecondary, nestedFallbackVar, complexCssVar } from './tokenVars'; +import { ctrlLinkForegroundBrandHover } from '@fluentui/semantic-tokens'; const useStyles = makeStyles({ // Direct token reference @@ -30,7 +31,7 @@ const useStyles = makeStyles({ }, // Nested CSS variable with token nestedCssVar: { - background: \`var(--primary, var(--secondary, \${tokens.colorBrandForeground2}))\`, + background: \`var(--primary, var(\${ctrlLinkForegroundBrandHover}, \${tokens.colorBrandForeground2}))\`, }, // Imported nested CSS variable with token importedNestedVar: { diff --git a/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/index.ts b/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/index.ts deleted file mode 100644 index 58bb5be6f..000000000 --- a/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { someToken, anotherToken } from './semantic-tokens'; - -export const colorNeutralForeground1 = '--color-neutral-foreground-1'; diff --git a/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/semantic-tokens.ts b/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/semantic-tokens.ts deleted file mode 100644 index 6874e0204..000000000 --- a/packages/token-analyzer/src/__tests__/test-files/semantic-tokens/semantic-tokens.ts +++ /dev/null @@ -1,2 +0,0 @@ -export const someToken = ''; -export const anotherToken = ''; diff --git a/packages/token-analyzer/src/cssVarTokenExtractor.ts b/packages/token-analyzer/src/cssVarTokenExtractor.ts deleted file mode 100644 index 298e02065..000000000 --- a/packages/token-analyzer/src/cssVarTokenExtractor.ts +++ /dev/null @@ -1,102 +0,0 @@ -// cssVarTokenExtractor.ts -import { log } from './debugUtils.js'; -import { TokenReference } from './types.js'; -import { addTokenToArray, extractTokensFromText } from './tokenUtils.js'; - -/** - * Extracts token references from CSS variable syntax including nested fallback chains - * Example: var(--some-token, var(--fallback, var(${tokens.someToken}))) - * - * @param value The CSS variable string to process - * @param propertyName The CSS property name this value is assigned to - * @param path The path in the style object - * @param TOKEN_REGEX The regex pattern to match token references - * @returns Array of token references found in the string - */ -export function extractTokensFromCssVars( - value: string, - propertyName: string, - path: string[] = [] -): TokenReference[] { - let tokens: TokenReference[] = []; - - let testValue = value; - - // Direct token matches in the string - const directMatches = extractTokensFromText(testValue); - if (directMatches.length > 0) { - directMatches.forEach((match) => { - testValue = testValue.replace(match, ''); // Remove direct matches from the string - tokens = addTokenToArray( - { - property: propertyName, - token: [match], - path, - }, - tokens - ); - }); - } - - // we have an issue with duplicated calls. A direct match will match the whole string as would a token within a var part - // found by the regex, so we need to remove the direct matches from the string - - // Look for CSS var() patterns - const varPattern = /var\s*\(\s*([^,)]*),?\s*(.*?)\s*\)/g; - let match: RegExpExecArray | null; - - while ((match = varPattern.exec(testValue)) !== null) { - const fullMatch = match[0]; // The entire var(...) 
expression - const varName = match[1]; // The CSS variable name - const fallback = match[2]; // The fallback value, which might contain nested var() calls - - log(`Processing CSS var: ${fullMatch}`); - log(` - Variable name: ${varName}`); - log(` - Fallback: ${fallback}`); - - // Check if the variable name contains a token reference - const varNameTokens = extractTokensFromText(varName); - if (varNameTokens.length > 0) { - varNameTokens.forEach((token) => { - tokens = addTokenToArray( - { - property: propertyName, - token: [token], - path, - }, - tokens - ); - }); - } - - // If there's a fallback value, it might contain tokens or nested var() calls - if (fallback) { - // Recursively process the fallback - if (fallback.includes('var(')) { - const fallbackTokens = extractTokensFromCssVars( - fallback, - propertyName, - path - ); - tokens.push(...fallbackTokens); - } else { - // Check for direct token references in the fallback - const fallbackTokens = extractTokensFromText(fallback); - if (fallbackTokens.length > 0) { - fallbackTokens.forEach((token) => { - tokens = addTokenToArray( - { - property: propertyName, - token: [token], - path, - }, - tokens - ); - }); - } - } - } - } - - return tokens; -} diff --git a/packages/token-analyzer/src/processTemplateStringLiteral.ts b/packages/token-analyzer/src/processTemplateStringLiteral.ts index 0770d4b81..eb354b31f 100644 --- a/packages/token-analyzer/src/processTemplateStringLiteral.ts +++ b/packages/token-analyzer/src/processTemplateStringLiteral.ts @@ -1,11 +1,5 @@ import { TemplateExpression, Node } from 'ts-morph'; -/** - * Function that processes template string literals that might contain references to tokens. - * It extracts potential token references for further processing. Since we want to use the same logic - */ -export const processTemplateStringLiteral = (): {} => {}; - interface ExtractedNodessFromTemplateStringLiteral { /** * The original template expression that we're processing. diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index e28d355d5..8b1adc689 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -8,20 +8,15 @@ import { CallExpression, TemplateExpression, Identifier, - TemplateSpan, - TemplateHead, - TemplateMiddle, - TemplateTail, } from 'ts-morph'; import { TokenReference, TokenResolverInfo } from './types'; -import { extractTokensFromCssVars } from './cssVarTokenExtractor'; import { addTokenToArray, - extractTokensFromText, getInitializerFromIdentifier, getPropertiesForShorthand, isTokenReference, } from './tokenUtils'; +import { extractNodesFromTemplateStringLiteral } from './processTemplateStringLiteral'; /** * Function that centarlizes the logic for resolving tokens from a node. 
@@ -53,16 +48,6 @@ export const resolveToken = (info: TokenResolverInfo): TokenReference[] => { return processCallExpression(info as TokenResolverInfo); } else if (Node.isPropertyAssignment(node)) { return processPropertyAssignment(info as TokenResolverInfo); - } else if ( - Node.isTemplateSpan(node) || - Node.isTemplateHead(node) || - Node.isTemplateMiddle(node) || - Node.isTemplateTail(node) - ) { - // Unless we need specialized handling, use the template expression resolver - return processTemplateExpression( - info as TokenResolverInfo - ); } return tokens; @@ -241,49 +226,46 @@ const processCallExpression = (info: TokenResolverInfo): TokenRe }; /** - * + * This is where we should process template spans and feed it back into resolveToken. We also need to check that + * imported values are tokens etc. + * We will also break down each individual group of fallbacks as multiple tokens in our output. So if a single property + * like shadow has a few var() functions, we should return each one as a separate token. We should do the same with + * separate token values in general. * @param info * @returns */ -const processTemplateExpression = ( - info: TokenResolverInfo -): TokenReference[] => { - /** - * This is where we should process template spans and feed it back into resolveToken. We also need to check that - * imported values are tokens etc. - */ - - const { node, path, parentName, tokens, isVariableReference, sourceFile } = info; - const text = node.getText(); +const processTemplateExpression = (info: TokenResolverInfo): TokenReference[] => { + const { node, path, parentName, tokens } = info; + const returnTokens = tokens.slice(); - // Check for CSS var() syntax that might contain tokens - if (text.includes('var(')) { - const cssVarTokens = extractTokensFromCssVars(text, path[path.length - 1] ?? parentName, path); - return addTokenToArray(cssVarTokens, tokens, isVariableReference, sourceFile); - } else { - // Check for direct token references - const matches = extractTokensFromText(node); - - let returnTokens = tokens.slice(); - if (matches.length > 0) { - matches.forEach((match) => { - returnTokens = addTokenToArray( - { - property: path[path.length - 1] ?? parentName, - token: [match], - path, - isVariableReference, - sourceFile, - }, - returnTokens, - isVariableReference, - sourceFile - ); + for (const expressions of extractNodesFromTemplateStringLiteral(node).extractedExpressions) { + // We should create a new token entry if we do indeed have tokens within our literal at this stage + const groupedTokens: TokenReference = { + property: path[path.length - 1] ?? parentName, + token: [], + path, + }; + for (const nestedExpression of expressions) { + const processedToken = resolveToken({ + ...info, + tokens: [], + node: nestedExpression, }); + if (processedToken.length > 0) { + for (const token of processedToken) { + groupedTokens.token.push(...token.token); + } + } } - return returnTokens; + // If we have verified tokens (at least one), push them to the tokens array + // If this is empty, we only had expressions but no tokens. 
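    // Illustration (hypothetical style value, not from this patch): for a property such as
    //   boxShadow: `var(--shadow, ${tokens.colorNeutralShadowAmbient}) var(--key, ${tokens.colorNeutralShadowKey})`
    // the two var() fallback groups are resolved independently, so two separate TokenReference
    // entries end up being pushed here, one per group.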
+ if (groupedTokens.token.length > 0) { + returnTokens.push(groupedTokens); + } } + + return returnTokens; }; const processPropertyAssignment = (info: TokenResolverInfo): TokenReference[] => { From 349fde6f0af286c0d2ff0f457a8fae57f62c963d Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 8 May 2025 19:00:33 -0700 Subject: [PATCH 51/75] Update comment --- packages/token-analyzer/src/tokenResolver.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index 8b1adc689..e65dde216 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -226,9 +226,9 @@ const processCallExpression = (info: TokenResolverInfo): TokenRe }; /** - * This is where we should process template spans and feed it back into resolveToken. We also need to check that + * This is where we process template spans and feed it back into resolveToken. We also need to check that * imported values are tokens etc. - * We will also break down each individual group of fallbacks as multiple tokens in our output. So if a single property + * We also break down each individual group of fallbacks as multiple tokens in our output. So if a single property * like shadow has a few var() functions, we should return each one as a separate token. We should do the same with * separate token values in general. * @param info From 1bffe0dd2ec0b6087dd51a6fc3d6b30c811a59a4 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 9 May 2025 15:35:41 -0700 Subject: [PATCH 52/75] more notes and tasks in todo --- packages/token-analyzer/README.md | 40 +++++++++++++++++++++++++++---- 1 file changed, 35 insertions(+), 5 deletions(-) diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index f47399ea2..da659c6d6 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -12,8 +12,6 @@ A static analysis tool that scans your project's style files to track and analyz - Update extractTokensFromText to find imported vars and tokens. We've updated it to resolve variable declarations thusfar but there's potential cases where we could have imports impact this as well. - add tests for structure and output. ~~We're processing the styles but not putting them in the right places right now~~ - update contributing doc with info about version management -- Dedupe logic from extractTokensFromText and isTokenReference -- Add token test that determines which package tokens come from. IE (@fluentui/tokens or @fluentui/semantic-tokens) - Data Flow - find all styles files - get all imports, analyze them for token references or values, return them to the main script flow @@ -23,9 +21,41 @@ A static analysis tool that scans your project's style files to track and analyz - The data flow complexity is a bit high currently and we should only recurse where we actually need to. - property set in styles -> analyze type (expression call, initializer, declaration, etc) -> resolve given import information, type and rules -> Once we resolve, analyze if it's a token which should be a single call so we can centralize it -> return token with path, value, etc. This should include priority order if we have a var() fallback structure. - we need to update isToken to resolve to which package/module it's imported from. -- We also need to do this for shorthands -- We should write a function that does this from a node and follows it up the import chain. 
-- Clean processImportedStringTokens to point back to resolveToken as there's some duplication there + + - We also need to do this for shorthands + - We should write a function that does this from a node and follows it up the import chain. + +- Update test to return promise instead of async/await function. +- We need to update import analyzer to handle namespace imports + +- when we resolve imports we store template string literal spans as part of the object but I don't thiink that we need to this. More importantly, we need to maintain a reference to the node for any additional processing, and then the individual initializers and other internal nodes. Clean this up before moving into the ordering as it'll be easier to deal with. We should also ensure this is consistent across template literals. + +- Switch to getImmediatelyAliasedSymbol from getAliasedSymbol. This is because if we have a direct export in a file like `export { someToken } from 'semantic-tokens` in a local file, we won't resolve to the local export location, but the library file, or a d.ts file. This isn't really what we need and we'd missed the actual import from `semantic-tokens` because of this and not correctly mark it. We'd be out of the boundary of our current application. This means we do need to manually walk but it shouldn't be overly complex. We should grab the immediate alias, see if we see a known token package, if not, walk again, etc until we can't anymore. From there if there isn't one we know it's not a token. + +- handle when something has an import/export alias. Ex: `import { someToken as blah}`. In that case we have to look at `propertyName` and `name` to get the original and aliased names. + +ExportDeclaration + +## Notes + +Get the symbol, find it's declaration, walk up the tree until we find the `ExportDeclaration`. From there grab the module specifier +compare that value to our known list +if it doesn't match, walk again, repeat +save the declaration + +shortcut analysis if straight import is pointing to a known token package. If not, we can then analyze further with + +-_ get source file (useButtonStyles.styles.ts) -_ pull all import declarations -_ process each import declaration based on type (named, default, namespace) -_ get the name of each import (if named or namespace) -_ check if the import is from a known package right away -_ if so, we add it and move on + +- if not we process the file the module specifier resolves to (this could be a package index.ts or local file) (import-test.ts) + - we could have direct exports (re-exports) or we could have imports from other packages and then exports of those values. Either way the first thing we see should + be an export. We need to determine if the export is a full re-export or a declaration (extractValueFromDeclaration does this already). + +Since we correctly resolve colorNeutralForeground1, we need to then add a mapping that the symbol resolved back into a known token package. + +LET'S GET ODSPS AND TEAMS UXEs TO PULL DOWN THE NIGHTLY STUFF AND TRY TO USE SEMANTIC TOKENS ON THEIR CUSTOM COMPONENTS + +- could we get anyone to also look at calendar? talk to Jeff about if there's anyone that could take a look? 
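A rough sketch of the alias walk described in these notes (an editorial illustration, not part of this patch): it assumes ts-morph and a known-package list along the lines of `knownTokenImportsAndModules`; `findKnownTokenImport` and `KNOWN_TOKEN_PACKAGES` are hypothetical names.

```ts
import { Node, Symbol, SyntaxKind, TypeChecker } from 'ts-morph';

// Assumed known token packages, mirroring the examples mentioned earlier in this README.
const KNOWN_TOKEN_PACKAGES = ['@fluentui/semantic-tokens', '@fluentui/tokens'];

// Walk the alias chain one hop at a time and stop as soon as a declaration is an
// import specifier whose module specifier is a known token package.
function findKnownTokenImport(symbol: Symbol, typeChecker: TypeChecker): string | undefined {
  let current: Symbol | undefined = symbol;
  while (current) {
    for (const decl of current.getDeclarations()) {
      if (Node.isImportSpecifier(decl)) {
        const importDecl = decl.getFirstAncestorByKind(SyntaxKind.ImportDeclaration);
        const specifier = importDecl?.getModuleSpecifierValue();
        if (specifier && KNOWN_TOKEN_PACKAGES.includes(specifier)) {
          return decl.getName(); // the original (pre-alias) token name
        }
      }
    }
    if (!current.isAlias()) {
      break;
    }
    // One alias hop at a time keeps us on the local re-export instead of jumping
    // straight to the library's .d.ts declaration.
    const next = typeChecker.getImmediatelyAliasedSymbol(current);
    if (!next || next === current) {
      break;
    }
    current = next;
  }
  return undefined;
}
```

Taking one hop at a time with `getImmediatelyAliasedSymbol` (rather than `getAliasedSymbol`) is what keeps the walk inside the application boundary, which is the issue called out in the bullet above.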
## Features From a5f6a40e2f741aa2b73a4784f2cac88f076f5127 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 14 May 2025 23:20:57 -0700 Subject: [PATCH 53/75] refactoring import analyzer (partial) create flow diagram of import analyzer with mermaid visual some changes to process initializer in token resollver (unsure if we'll keep) --- .../src/__tests__/reexportResolver.test.ts | 45 ++ packages/token-analyzer/src/importAnalyzer.ts | 449 ++++-------------- .../token-analyzer/src/importAnalyzerFlow.md | 25 + .../token-analyzer/src/reexportResolver.ts | 122 +++++ packages/token-analyzer/src/tokenResolver.ts | 37 +- 5 files changed, 304 insertions(+), 374 deletions(-) create mode 100644 packages/token-analyzer/src/__tests__/reexportResolver.test.ts create mode 100644 packages/token-analyzer/src/importAnalyzerFlow.md create mode 100644 packages/token-analyzer/src/reexportResolver.ts diff --git a/packages/token-analyzer/src/__tests__/reexportResolver.test.ts b/packages/token-analyzer/src/__tests__/reexportResolver.test.ts new file mode 100644 index 000000000..f86a6c951 --- /dev/null +++ b/packages/token-analyzer/src/__tests__/reexportResolver.test.ts @@ -0,0 +1,45 @@ +import { Project, ModuleKind } from 'ts-morph'; +import { resolveExport } from '../reexportResolver'; +describe('reexportResolver.resolveExport', () => { + let project: Project; + beforeEach(() => { + project = new Project({ + useInMemoryFileSystem: true, + skipFileDependencyResolution: true, + compilerOptions: { module: ModuleKind.CommonJS }, + }); + }); + + it('resolves a direct export in the same file', () => { + const file = project.createSourceFile('A.ts', `export const foo = 'bar';`); + const info = resolveExport(file, 'foo', project.getTypeChecker()); + expect(info).toBeDefined(); + expect(info!.sourceFile.getBaseName()).toBe('A.ts'); + expect(info!.declaration.getText()).toContain('foo'); + }); + + it('resolves a re-export from another module', () => { + project.createSourceFile('A.ts', `export const foo = 'baz';`); + const fileB = project.createSourceFile('B.ts', `export { foo } from './A';`); + const infoB = resolveExport(fileB, 'foo', project.getTypeChecker()); + expect(infoB).toBeDefined(); + expect(infoB!.sourceFile.getBaseName()).toBe('A.ts'); + expect(infoB!.declaration.getText()).toContain('foo'); + }); + + it('resolves a renamed re-export alias', () => { + project.createSourceFile('A.ts', `export const foo = 123;`); + const fileC = project.createSourceFile('C.ts', `export { foo as foo2 } from './A';`); + const infoC = resolveExport(fileC, 'foo2', project.getTypeChecker()); + expect(infoC).toBeDefined(); + expect(infoC!.sourceFile.getBaseName()).toBe('A.ts'); + expect(infoC!.importExportSpecifierName).toBe('foo'); + expect(infoC!.declaration.getText()).toContain('foo'); + }); + + it('returns undefined for missing export', () => { + const file = project.createSourceFile('X.ts', `export const a = 1;`); + const info = resolveExport(file, 'nope', project.getTypeChecker()); + expect(info).toBeUndefined(); + }); +}); diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index e30d977a9..4aab7612c 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -1,9 +1,10 @@ // importAnalyzer.ts -import { Project, Node, SourceFile, ImportDeclaration, Symbol, TypeChecker } from 'ts-morph'; +import { Project, Node, SourceFile, ImportDeclaration, Symbol, TypeChecker, ts } from 'ts-morph'; import { log } from 
'./debugUtils.js'; -import { knownTokenImportsAndModules } from './types.js'; +import { resolveExport, isKnownTokenPackage, ExportInfo } from './reexportResolver'; import { getModuleSourceFile } from './moduleResolver.js'; import { getInitializerFromIdentifier } from './tokenUtils'; +import { extractNodesFromTemplateStringLiteral } from './processTemplateStringLiteral'; /** * Represents a value imported from another module @@ -12,7 +13,18 @@ export interface ImportedValue { value: string; sourceFile: string; node: Node; - knownTokenPackage: boolean; + declaredValue?: string; + declarationNode?: Node; + templateGroups?: Node[][]; +} + +// Context passed through each import handler for clearer signature +interface ImportContext { + importDecl: ImportDeclaration; + moduleSpecifier: string; + importedFile: SourceFile; + typeChecker: TypeChecker; + importedValues: Map; } /** @@ -60,71 +72,47 @@ async function processImportDeclaration( return; } + const context: ImportContext = { + importDecl, + moduleSpecifier, + importedFile, + typeChecker, + importedValues, + }; + // Process named imports (import { x } from 'module') - processNamedImports(importDecl, importedFile, project, importedValues, typeChecker, moduleSpecifier); + processNamedImports(context); // Process default import (import x from 'module') - processDefaultImport(importDecl, importedFile, project, importedValues, typeChecker, moduleSpecifier); + processDefaultImport(context); - processNamespaceImport(importDecl, importedFile, project, importedValues, typeChecker, moduleSpecifier); + processNamespaceImport(context); } /** * Process named imports using TypeScript's type checker to follow re-exports */ -function processNamedImports( - importDecl: ImportDeclaration, - importedFile: SourceFile, - project: Project, - importedValues: Map, - typeChecker: TypeChecker, - moduleSpecifier: string -): void { +function processNamedImports(context: ImportContext): void { + const { importDecl, typeChecker } = context; + for (const namedImport of importDecl.getNamedImports()) { - // We want to keep the node reference the same as the original import so when we do equality checks we can ensure - // we're going to get a valid result. If we moved to use a nested value we'd never get a true result across files. + console.log( + `is this module ${context.moduleSpecifier} relative? ${ts.isExternalModuleNameRelative(context.moduleSpecifier)}` + ); const nameOrAliasNode = namedImport.getAliasNode() ?? namedImport; const importName = namedImport.getName(); const alias = namedImport.getAliasNode()?.getText() || importName; - // We should process the imports module import first to determine if it's a known token package - // If it's not, we can then process it's value as it's likely another file within the application or library. - if (isKnownTokenPackage(moduleSpecifier, importName)) { - importedValues.set(alias, { - value: importName, - node: nameOrAliasNode, // Use the alias node if available, otherwise use the declaration - knownTokenPackage: true, - sourceFile: importedFile.getFilePath(), - }); - - log(`Added known token import: ${alias} = ${importName} from ${importedFile.getFilePath()}`); + if (isKnownTokenPackage(context.moduleSpecifier, importName)) { + // we have a direct token import, record it and move on. 
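      // e.g. `import { ctrlLinkForegroundBrandHover } from '@fluentui/semantic-tokens'` (as used in
      // the tests above) lands in this branch, while a relative import such as `./tokenVars`
      // falls through to the resolveExport path below.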
+ recordImport(context, alias, nameOrAliasNode); + addTemplateGroups(context.importedValues.get(alias)!); } else { - // Find the export's true source using TypeScript's type checker - const exportInfo = findExportDeclaration(importedFile, importName, typeChecker); + const exportInfo: ExportInfo | undefined = resolveExport(context.importedFile, importName, typeChecker); if (exportInfo) { - const { - sourceFile: declarationFile, - moduleSpecifier: exportModuleSpecifier, - importExportSpecifierName, - valueDeclarationValue, - } = exportInfo; - // We need to first check if the import is coming from a known token package - if (isKnownTokenPackage(exportModuleSpecifier, importExportSpecifierName ?? importName)) { - // We don't have a direct known token import, so process where the value is declared and determine if that's a - // known token package or not. If not, we can omit the value. - - importedValues.set(alias, { - // TODO we should set the value to the end token resolution, for now we will process to the import if this is an import - // which we need to get from findExportDeclaration and the processing within that function - value: valueDeclarationValue ?? importName, - node: nameOrAliasNode, - knownTokenPackage: true, - sourceFile: declarationFile.getFilePath(), - }); - - log(`Added imported value: ${alias} = from ${exportModuleSpecifier}`); - } + recordImport(context, alias, nameOrAliasNode, exportInfo); + addTemplateGroups(context.importedValues.get(alias)!); } } } @@ -133,14 +121,9 @@ function processNamedImports( /** * Process default import using TypeScript's type checker */ -function processDefaultImport( - importDecl: ImportDeclaration, - importedFile: SourceFile, - project: Project, - importedValues: Map, - typeChecker: TypeChecker, - moduleSpecifier: string -): void { +function processDefaultImport(context: ImportContext): void { + const { importDecl, typeChecker } = context; + const defaultImport = importDecl.getDefaultImport(); if (!defaultImport) { log(`No default import found in ${importDecl.getModuleSpecifierValue()}`); @@ -148,335 +131,77 @@ function processDefaultImport( } const importName = defaultImport.getText(); - if (isKnownTokenPackage(moduleSpecifier)) { - importedValues.set(importName, { - value: importName, - node: importDecl, - knownTokenPackage: true, - sourceFile: importedFile.getFilePath(), - }); + if (isKnownTokenPackage(context.moduleSpecifier)) { + recordImport(context, importName, importDecl); + addTemplateGroups(context.importedValues.get(importName)!); } else { - // Find the default export's true source - const exportInfo = findExportDeclaration(importedFile, 'default', typeChecker); + const exportInfo: ExportInfo | undefined = resolveExport(context.importedFile, 'default', typeChecker); if (exportInfo) { - const { - sourceFile: declarationFile, - moduleSpecifier: exportModuleSpecifier, - importExportSpecifierName, - valueDeclarationValue, - } = exportInfo; - - if (isKnownTokenPackage(exportModuleSpecifier, importExportSpecifierName ?? importName)) { - importedValues.set(importName, { - // TODO we should set the value to the end token resolution, for now we will process to the import if this is an import - // which we need to get from findExportDeclaration and the processing within that function - value: valueDeclarationValue ?? importName, - node: defaultImport, - knownTokenPackage: true, - sourceFile: declarationFile.getFilePath(), - }); - log( - `Added default import: ${importName} = ${ - valueDeclarationValue ?? 
importName - } from ${declarationFile.getFilePath()}` - ); - } + recordImport(context, importName, defaultImport, exportInfo); + addTemplateGroups(context.importedValues.get(importName)!); } } } -function processNamespaceImport( - importDecl: ImportDeclaration, - importedFile: SourceFile, - project: Project, - importedValues: Map, - typeChecker: TypeChecker, - moduleSpecifier: string -): void { +function processNamespaceImport(context: ImportContext): void { + const { importDecl, moduleSpecifier, importedFile, importedValues } = context; + const namespaceImport = importDecl.getNamespaceImport(); if (!namespaceImport) { log(`No namespace import found in ${importDecl.getModuleSpecifierValue()}`); return; } - // We need to resolve any re-exports to find the true source of the namespace import just as we do with the - // other import types. Mark as TODO to handle this in the future. We don't expect many to deeply nest namespace - // imports but it's possible. We can prioritize if we see this as a common pattern. - const importName = namespaceImport.getText(); - // Find the default export's true source + // Only record namespace import if it's the tokens package if (isKnownTokenPackage(moduleSpecifier)) { importedValues.set(importName, { value: importName, node: namespaceImport, - knownTokenPackage: true, sourceFile: importedFile.getFilePath(), }); } } -function getModuleSpecifierFromExportSymbol(symbol: Symbol): { - moduleSpecifier: string | undefined; - sourceFile: SourceFile | undefined; - declaration: Node | undefined; - specifier: Node | undefined; - specifierName: string | undefined; -} { - let moduleSpecifier: string | undefined; - let sourceFile: SourceFile | undefined; - let declaration: Node | undefined; - let specifier: Node | undefined; - let specifierName: string | undefined; - symbol.getDeclarations().forEach((symbolDeclaration) => { - if (Node.isVariableDeclaration(symbolDeclaration)) { - let symbolInitializer = symbolDeclaration.getInitializer(); - if (Node.isPropertyAccessExpression(symbolInitializer)) { - symbolInitializer = symbolInitializer.getExpression(); - } - - const varSymbol = symbolInitializer?.getSymbol(); - if (varSymbol) { - const varImportSpecifier = varSymbol.getDeclarations().find((varDeclaration) => { - return Node.isImportSpecifier(varDeclaration); - }); - const varImportSymbol = varImportSpecifier?.getSymbol(); - specifier = varImportSpecifier; - specifierName = varImportSpecifier?.getName(); - if (varImportSymbol) { - const { - moduleSpecifier: newSpecifier, - sourceFile: newSourceFile, - declaration: newDeclaration, - } = getModuleSpecifierFromExportSymbol(varImportSymbol); - moduleSpecifier = newSpecifier; - sourceFile = newSourceFile; - declaration = newDeclaration; - } - } - } else if (Node.isExportAssignment(symbolDeclaration)) { - // we have a default export and need to break down the expression to find the value - const symbolExpression = symbolDeclaration.getExpression(); - if (Node.isIdentifier(symbolExpression)) { - const symbolInitializer = getInitializerFromIdentifier(symbolExpression); - if (Node.isPropertyAccessExpression(symbolInitializer)) { - const accessExpressionSymbol = symbolInitializer.getExpression().getSymbol(); - const varImportSpecifier = accessExpressionSymbol?.getDeclarations().find((varDeclaration) => { - return Node.isImportSpecifier(varDeclaration); - }); - - specifier = varImportSpecifier; - specifierName = varImportSpecifier?.getName(); - if (accessExpressionSymbol) { - const { - moduleSpecifier: newSpecifier, - sourceFile: 
newSourceFile, - declaration: newDeclaration, - } = getModuleSpecifierFromExportSymbol(accessExpressionSymbol); - moduleSpecifier = newSpecifier; - sourceFile = newSourceFile; - declaration = newDeclaration; - } - } - } else if (Node.isPropertyAccessExpression(symbolExpression)) { - // Get the property access expression's expression (the token part of token.someValue) - // From here we can extract the symbol and recurse. - const accessExpressionSymbol = symbolExpression.getExpression().getSymbol(); - const varImportSpecifier = accessExpressionSymbol?.getDeclarations().find((varDeclaration) => { - return Node.isImportSpecifier(varDeclaration); - }); - - specifier = varImportSpecifier; - specifierName = varImportSpecifier?.getName(); - if (accessExpressionSymbol) { - const { - moduleSpecifier: newSpecifier, - sourceFile: newSourceFile, - declaration: newDeclaration, - } = getModuleSpecifierFromExportSymbol(accessExpressionSymbol); - moduleSpecifier = newSpecifier; - sourceFile = newSourceFile; - declaration = newDeclaration; - } - } - } else { - // Walk the tree until we find an ExportDeclaration - let currentDeclaration: Node | undefined = symbolDeclaration; - while ( - Node.isExportSpecifier(currentDeclaration) || - Node.isNamedExports(currentDeclaration) || - Node.isImportSpecifier(currentDeclaration) || - Node.isNamedImports(currentDeclaration) || - Node.isImportClause(currentDeclaration) - ) { - if (Node.isExportSpecifier(currentDeclaration) || Node.isImportSpecifier(currentDeclaration)) { - specifier = currentDeclaration; - specifierName = currentDeclaration.getName(); - } - currentDeclaration = currentDeclaration.getParent(); - } - - if (Node.isExportDeclaration(currentDeclaration) || Node.isImportDeclaration(currentDeclaration)) { - moduleSpecifier = currentDeclaration.getModuleSpecifierValue(); - sourceFile = currentDeclaration.getSourceFile(); - declaration = currentDeclaration; - } - } - }); - return { moduleSpecifier, sourceFile, declaration, specifier, specifierName }; -} - -function isKnownTokenPackage(moduleSpecifier: string, valueName?: string): boolean { - const knownTokenKeys = Object.keys(knownTokenImportsAndModules); - return ( - (valueName !== undefined && - knownTokenKeys.includes(valueName) && - knownTokenImportsAndModules[valueName].includes(moduleSpecifier)) || - knownTokenImportsAndModules.default.includes(moduleSpecifier) - ); -} - -/** - * Function that walks up the aliases to find the nearest import/export declaration with a known token package - */ -function findNearestKnownTokenInfo( - exportSymbol: Symbol, - typeChecker: TypeChecker -): - | { - knownTokenModuleSpecifier: string; - knownTokenSourceFile?: SourceFile; - knownTokenDeclaration?: Node; - knownTokenImportExportName?: string; - knownTokenImportExportSpecifier?: Node; - knownTokenValueDeclarationValue?: string; - } - | undefined { - // Get the module specifier if we're an export specifier - const { moduleSpecifier, sourceFile, declaration, specifier, specifierName } = - getModuleSpecifierFromExportSymbol(exportSymbol); - - const isAlias = exportSymbol.isAlias(); - if (moduleSpecifier) { - if (isKnownTokenPackage(moduleSpecifier, specifierName)) { - let tokenValueDeclaration = exportSymbol.getValueDeclaration(); - if (Node.isVariableDeclaration(tokenValueDeclaration)) { - tokenValueDeclaration = tokenValueDeclaration.getInitializer(); - } - exportSymbol.getDeclarations().forEach((declaration) => { - if (Node.isExportAssignment(declaration)) { - tokenValueDeclaration = declaration.getExpression(); - } - 
}); - return { - knownTokenModuleSpecifier: moduleSpecifier, - knownTokenSourceFile: sourceFile, - knownTokenDeclaration: declaration ?? exportSymbol.getValueDeclaration(), - knownTokenImportExportName: specifierName, - knownTokenImportExportSpecifier: specifier, - knownTokenValueDeclarationValue: tokenValueDeclaration?.getText(), - }; - } - // If this is an alias (re-export), get the original symbol - else if (isAlias) { - let resolvedSymbol: Symbol = exportSymbol; - // we're ok type casting here because we know the symbol is an alias from the previous check but TS won't pick up on it - resolvedSymbol = typeChecker.getImmediatelyAliasedSymbol(exportSymbol) as Symbol; - if (isKnownTokenPackage(moduleSpecifier, resolvedSymbol.getName())) { - let tokenValueDeclaration = resolvedSymbol.getValueDeclaration(); - if (Node.isVariableDeclaration(tokenValueDeclaration)) { - tokenValueDeclaration = tokenValueDeclaration.getInitializer(); - } - return { - knownTokenModuleSpecifier: moduleSpecifier, - knownTokenSourceFile: sourceFile, - knownTokenDeclaration: declaration ?? resolvedSymbol.getValueDeclaration(), - knownTokenImportExportName: specifierName, - knownTokenImportExportSpecifier: specifier, - knownTokenValueDeclarationValue: tokenValueDeclaration?.getText(), - }; - } else { - return findNearestKnownTokenInfo(resolvedSymbol, typeChecker); - } - } +// Helper to record an import consistently +function recordImport(ctx: ImportContext, alias: string, node: Node, exportInfo?: ExportInfo): void { + const { importedValues, moduleSpecifier, importedFile } = ctx; + const pkg = exportInfo?.moduleSpecifier ?? moduleSpecifier; + const name = exportInfo?.importExportSpecifierName ?? alias; + + // Only record known token imports + if (isKnownTokenPackage(pkg, name)) { + const source = exportInfo?.sourceFile ?? importedFile; + + // Use actual token literal when available + const importValue = exportInfo?.valueDeclarationValue ?? 
alias; + importedValues.set(alias, { + value: importValue, + node, + sourceFile: source.getFilePath(), + declaredValue: exportInfo?.valueDeclarationValue, + declarationNode: exportInfo?.declaration, + }); + log(`Recorded token import: ${alias} from ${source.getFilePath()}`); } - return undefined; } -/** - * Find an export's original declaration using TypeScript's type checker - */ -function findExportDeclaration( - sourceFile: SourceFile, - exportName: string, - typeChecker: TypeChecker -): - | { - declaration: Node; - sourceFile: SourceFile; - moduleSpecifier: string; - valueDeclarationValue?: string; - importExportSpecifierName?: string; - importExportSpecifier?: Node; - } - | undefined { - try { - // Get the source file's symbol (represents the module) - const sourceFileSymbol = typeChecker.getSymbolAtLocation(sourceFile); - if (!sourceFileSymbol) { - log(`No symbol found for source file ${sourceFile.getFilePath()}`); - return undefined; - } - - // Get all exports from this module - const exports = typeChecker.getExportsOfModule(sourceFileSymbol); - if (!exports || exports.length === 0) { - log(`No exports found in module ${sourceFile.getFilePath()}`); - return undefined; - } - - // Find the specific export we're looking for - const exportSymbol = exports.find((symbol: Symbol) => symbol.getName() === exportName); - if (!exportSymbol) { - log(`Export symbol '${exportName}' not found in ${sourceFile.getFilePath()}`); - return undefined; - } - - const tokenInfo = findNearestKnownTokenInfo(exportSymbol, typeChecker); - - if (tokenInfo && tokenInfo.knownTokenDeclaration && tokenInfo.knownTokenSourceFile) { - return { - declaration: tokenInfo.knownTokenDeclaration, - sourceFile: tokenInfo.knownTokenSourceFile, - moduleSpecifier: tokenInfo.knownTokenModuleSpecifier, - importExportSpecifierName: tokenInfo.knownTokenImportExportName, - importExportSpecifier: tokenInfo.knownTokenImportExportSpecifier, - valueDeclarationValue: tokenInfo.knownTokenValueDeclarationValue, - }; - } - } catch (err) { - log(`Error finding export declaration for ${exportName}:`, err); - return undefined; +// Helper to extract template groups if the declaration node is a template expression +function addTemplateGroups(imported: ImportedValue) { + const declNode = imported.declarationNode; + if (!declNode) { + return; } -} - -/** - * Extract string value from a declaration node - */ -function extractValueFromDeclaration(declaration: Node): { value: string } | undefined { - // Extract the value from the declaration - // TODO find the value of the token and then pass it into the values. This might be a string or template literal and - // we can use this later but it isn't needed for token identification, more for value processing down the line - // Handle variable declarations - if (Node.isVariableDeclaration(declaration)) { - return { value: declaration.getNameNode().getText() }; + let exprNode; + if (Node.isVariableDeclaration(declNode)) { + exprNode = declNode.getInitializer(); + } else if (Node.isExportAssignment(declNode)) { + exprNode = declNode.getExpression(); + } + if (exprNode && Node.isTemplateExpression(exprNode)) { + imported.templateGroups = extractNodesFromTemplateStringLiteral(exprNode).extractedExpressions; } - - // TODO IF NEEDED - // Handle right side of declaration if the assignment is from a known token package - // Handle template literals here if needed (we don't so far but may). 
- // We might also need to fully process the value but we'd do this by calling getAliasNode() on the value node and - // then getting the value declaration. - - return undefined; } + +// Local export-resolution functions moved to reexportResolver.ts diff --git a/packages/token-analyzer/src/importAnalyzerFlow.md b/packages/token-analyzer/src/importAnalyzerFlow.md new file mode 100644 index 000000000..37f514e48 --- /dev/null +++ b/packages/token-analyzer/src/importAnalyzerFlow.md @@ -0,0 +1,25 @@ +- analyze imports +- split into 3 paths to analyze named, default and namespace imports +- converge back to same path and analyze each import +- determine if it's a direct import of a token, if it is, log it and move on +- if it's not a direct token import, but is relative, we should analyze any aliases or value delcarations. We can also use `isExternalModuleNameRelative` to figure out if we should keep digging for aliases. So even if, for some reason, we have an alias but it's past the relative boundary, we can exit processing. We shouldn't dig into modules here. +- if it's not a direct token import, and is not relative, we know we've hit a boundary and can stop processing. + +```mermaid +flowchart TD + A[Start: Analyze Imports] --> B{Import Type?} + B --> |Named| C[Analyze Named Import] + B --> |Default| D[Analyze Default Import] + B --> |Namespace| E[Analyze Namespace Import] + C --> F[Converge Paths] + D --> F + E --> F + F[Analyze Each Import] --> G{Direct Token Import?} + G --> |Yes| H[Log & Continue] + G --> |No| I{Relative Import?} + I --> |Yes| J[Inspect Aliases/Value Declarations
Use isExternalModuleNameRelative] + J --> K{Within Relative Boundary?} + K --> |Yes| F + K --> |No| L[Exit Processing] + I --> |No| M[Stop Processing] +``` diff --git a/packages/token-analyzer/src/reexportResolver.ts b/packages/token-analyzer/src/reexportResolver.ts new file mode 100644 index 000000000..6faea9fcf --- /dev/null +++ b/packages/token-analyzer/src/reexportResolver.ts @@ -0,0 +1,122 @@ +import { Node, SourceFile, Symbol, TypeChecker, ImportDeclaration, SyntaxKind, ExportDeclaration } from 'ts-morph'; +import { log } from './debugUtils.js'; +import { knownTokenImportsAndModules } from './types.js'; + +export interface ExportInfo { + declaration: Node; + sourceFile: SourceFile; + moduleSpecifier: string; + importExportSpecifierName?: string; + valueDeclarationValue?: string; +} + +export function isKnownTokenPackage(moduleSpecifier: string, valueName?: string): boolean { + const keys = Object.keys(knownTokenImportsAndModules); + return ( + (valueName && keys.includes(valueName) && knownTokenImportsAndModules[valueName].includes(moduleSpecifier)) || + knownTokenImportsAndModules.default.includes(moduleSpecifier) + ); +} + +// Resolves an export name to its original declaration, following aliases and re-exports +export function resolveExport( + sourceFile: SourceFile, + exportName: string, + typeChecker: TypeChecker +): ExportInfo | undefined { + try { + // Handle named re-exports (export { foo as bar } from './module') + for (const exportDecl of sourceFile.getExportDeclarations()) { + const targetFile = exportDecl.getModuleSpecifierSourceFile(); + if (!targetFile) continue; + for (const specifier of exportDecl.getNamedExports()) { + const alias = specifier.getAliasNode()?.getText() ?? specifier.getNameNode().getText(); + const name = specifier.getNameNode().getText(); + if (alias === exportName) { + // Follow into target module with original name + return resolveExport(targetFile, name, typeChecker); + } + } + } + + // Get module symbol + const moduleSymbol = typeChecker.getSymbolAtLocation(sourceFile); + if (!moduleSymbol) return undefined; + // Get all direct exports + const exportsArr = typeChecker.getExportsOfModule(moduleSymbol); + // Find direct export symbol + let symbol = exportsArr.find((s) => s.getName() === exportName); + + // Handle star re-exports (export * from 'module') + if (!symbol) { + for (const exportDecl of sourceFile.getExportDeclarations()) { + // include export * from. 
isNamespaceExport covers 'export * as', but also check if no namedExports + if (!exportDecl.isNamespaceExport() && exportDecl.getNamedExports().length === 0) { + const target = exportDecl.getModuleSpecifierSourceFile(); + if (target) { + const nested = resolveExport(target, exportName, typeChecker); + if (nested) return nested; + } + } + } + return undefined; + } + + // Walk alias chain and check for knownTokenPackage at each step + let currentSymbol: Symbol | undefined = symbol; + while (currentSymbol) { + const decls = currentSymbol.getDeclarations(); + for (const decl of decls) { + if (Node.isImportSpecifier(decl)) { + const importDecl = decl.getFirstAncestorByKind(SyntaxKind.ImportDeclaration) as ImportDeclaration; + if (importDecl) { + const moduleSpecifier = importDecl.getModuleSpecifierValue(); + const valueName = decl.getName(); + if (isKnownTokenPackage(moduleSpecifier, valueName)) { + return { + declaration: decl, + sourceFile: importDecl.getSourceFile(), + moduleSpecifier, + importExportSpecifierName: valueName, + valueDeclarationValue: valueName, + }; + } + } + } + } + if (!currentSymbol.isAlias()) break; + const aliased = typeChecker.getImmediatelyAliasedSymbol(currentSymbol) as Symbol; + if (!aliased || aliased === currentSymbol) break; + currentSymbol = aliased; + } + + // Fallback: use local declaration in this module + const decl = symbol.getValueDeclaration() ?? symbol.getDeclarations()[0]; + if (!decl) return undefined; + + const declSourceFile = decl.getSourceFile(); + const moduleSpecifier = declSourceFile.getFilePath(); + // Extract the value text (initializer or expression) for accurate token reference + let valueDeclarationValue: string | undefined; + if (Node.isVariableDeclaration(decl)) { + const init = decl.getInitializer(); + valueDeclarationValue = init?.getText(); + } else if (Node.isExportAssignment(decl)) { + const expr = decl.getExpression(); + valueDeclarationValue = expr.getText(); + } else { + valueDeclarationValue = decl.getText(); + } + + return { + declaration: decl, + sourceFile: declSourceFile, + moduleSpecifier, + importExportSpecifierName: symbol.getName(), + valueDeclarationValue, + }; + } catch (e) { + log(`Error resolving export ${exportName} in ${sourceFile.getFilePath()}:`, e); + return undefined; + } +} diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index e65dde216..88764530d 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -69,20 +69,33 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ const text = node.getText(); const intializerNode = getInitializerFromIdentifier(node); - // knownTokenPackage is set to false for our importTest if (isTokenReference(info)) { - // Found a token, we should process and return it const propertyName = path[path.length - 1] ?? parentName; - returnTokens = addTokenToArray( - { - property: propertyName, - token: [importedValues.get(text)?.value ?? 
text], - path, - }, - returnTokens, - isVariableReference, - sourceFile - ); + const importedVal = importedValues.get(text)!; + if (importedVal.templateGroups && importedVal.templateGroups.length > 0) { + importedVal.templateGroups.forEach((group) => { + const grouped: TokenReference = { property: propertyName, token: [], path }; + group.forEach((exprNode) => { + const nestedTokens = resolveToken({ ...info, tokens: [], node: exprNode }); + nestedTokens.forEach((t) => grouped.token.push(...t.token)); + }); + if (grouped.token.length > 0) { + returnTokens.push(grouped); + } + }); + } else { + returnTokens = addTokenToArray( + { + property: propertyName, + token: [importedVal.value], + path, + }, + returnTokens, + isVariableReference, + sourceFile + ); + } + return returnTokens; } else if (intializerNode) { // we have a variable declaration and we should then check if the value is a token as well. Reprocess the node returnTokens = returnTokens.concat(resolveToken({ ...info, node: intializerNode })); From 3bce4c0236984788799fd638024b4af3a7ac4019 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Sun, 18 May 2025 23:42:57 -0400 Subject: [PATCH 54/75] moving analyzer flow diagram --- packages/token-analyzer/{src => }/importAnalyzerFlow.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename packages/token-analyzer/{src => }/importAnalyzerFlow.md (100%) diff --git a/packages/token-analyzer/src/importAnalyzerFlow.md b/packages/token-analyzer/importAnalyzerFlow.md similarity index 100% rename from packages/token-analyzer/src/importAnalyzerFlow.md rename to packages/token-analyzer/importAnalyzerFlow.md From 54cb420d2bf29b97244b5a0312f1fea94a110baf Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Sun, 18 May 2025 23:45:32 -0400 Subject: [PATCH 55/75] clean up of unused types fixing import analyzer to identify tokens deeply --- .../src/__tests__/reexportResolver.test.ts | 24 ++- packages/token-analyzer/src/importAnalyzer.ts | 85 ++++------ .../token-analyzer/src/reexportResolver.ts | 154 ++++++++---------- packages/token-analyzer/src/tokenResolver.ts | 9 +- packages/token-analyzer/src/tokenUtils.ts | 13 +- packages/token-analyzer/src/types.ts | 2 - 6 files changed, 130 insertions(+), 157 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/reexportResolver.test.ts b/packages/token-analyzer/src/__tests__/reexportResolver.test.ts index f86a6c951..f8211b1bc 100644 --- a/packages/token-analyzer/src/__tests__/reexportResolver.test.ts +++ b/packages/token-analyzer/src/__tests__/reexportResolver.test.ts @@ -1,3 +1,4 @@ +/* eslint-disable @typescript-eslint/no-non-null-assertion */ import { Project, ModuleKind } from 'ts-morph'; import { resolveExport } from '../reexportResolver'; describe('reexportResolver.resolveExport', () => { @@ -12,7 +13,7 @@ describe('reexportResolver.resolveExport', () => { it('resolves a direct export in the same file', () => { const file = project.createSourceFile('A.ts', `export const foo = 'bar';`); - const info = resolveExport(file, 'foo', project.getTypeChecker()); + const info = resolveExport(file, 'foo', project.getTypeChecker(), project); expect(info).toBeDefined(); expect(info!.sourceFile.getBaseName()).toBe('A.ts'); expect(info!.declaration.getText()).toContain('foo'); @@ -21,7 +22,7 @@ describe('reexportResolver.resolveExport', () => { it('resolves a re-export from another module', () => { project.createSourceFile('A.ts', `export const foo = 'baz';`); const fileB = project.createSourceFile('B.ts', `export { foo } from './A';`); - const infoB = 
resolveExport(fileB, 'foo', project.getTypeChecker()); + const infoB = resolveExport(fileB, 'foo', project.getTypeChecker(), project); expect(infoB).toBeDefined(); expect(infoB!.sourceFile.getBaseName()).toBe('A.ts'); expect(infoB!.declaration.getText()).toContain('foo'); @@ -30,16 +31,31 @@ describe('reexportResolver.resolveExport', () => { it('resolves a renamed re-export alias', () => { project.createSourceFile('A.ts', `export const foo = 123;`); const fileC = project.createSourceFile('C.ts', `export { foo as foo2 } from './A';`); - const infoC = resolveExport(fileC, 'foo2', project.getTypeChecker()); + const infoC = resolveExport(fileC, 'foo2', project.getTypeChecker(), project); expect(infoC).toBeDefined(); expect(infoC!.sourceFile.getBaseName()).toBe('A.ts'); expect(infoC!.importExportSpecifierName).toBe('foo'); expect(infoC!.declaration.getText()).toContain('foo'); }); + it('resolves an export that re-exports an imported symbol', () => { + project.createSourceFile('A.ts', `export const foo = 42;`); + const file = project.createSourceFile( + 'B.ts', + ` + import { foo } from './A'; + export const test = foo; + ` + ); + const info = resolveExport(file, 'test', project.getTypeChecker(), project); + expect(info).toBeDefined(); + expect(info!.sourceFile.getBaseName()).toBe('A.ts'); + expect(info!.declaration.getText()).toContain('foo'); + }); + it('returns undefined for missing export', () => { const file = project.createSourceFile('X.ts', `export const a = 1;`); - const info = resolveExport(file, 'nope', project.getTypeChecker()); + const info = resolveExport(file, 'nope', project.getTypeChecker(), project); expect(info).toBeUndefined(); }); }); diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index 4aab7612c..7a3057cc9 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -1,10 +1,9 @@ // importAnalyzer.ts -import { Project, Node, SourceFile, ImportDeclaration, Symbol, TypeChecker, ts } from 'ts-morph'; +import { Project, Node, SourceFile, ImportDeclaration, TypeChecker, ts } from 'ts-morph'; import { log } from './debugUtils.js'; -import { resolveExport, isKnownTokenPackage, ExportInfo } from './reexportResolver'; +import { resolveExport, ExportInfo } from './reexportResolver'; import { getModuleSourceFile } from './moduleResolver.js'; -import { getInitializerFromIdentifier } from './tokenUtils'; -import { extractNodesFromTemplateStringLiteral } from './processTemplateStringLiteral'; +import { isKnownTokenPackage } from './tokenUtils'; /** * Represents a value imported from another module @@ -25,6 +24,7 @@ interface ImportContext { importedFile: SourceFile; typeChecker: TypeChecker; importedValues: Map; + project: Project; } /** @@ -78,6 +78,7 @@ async function processImportDeclaration( importedFile, typeChecker, importedValues, + project, }; // Process named imports (import { x } from 'module') @@ -93,12 +94,11 @@ async function processImportDeclaration( * Process named imports using TypeScript's type checker to follow re-exports */ function processNamedImports(context: ImportContext): void { - const { importDecl, typeChecker } = context; + const { importDecl, typeChecker, project } = context; - for (const namedImport of importDecl.getNamedImports()) { - console.log( - `is this module ${context.moduleSpecifier} relative? 
${ts.isExternalModuleNameRelative(context.moduleSpecifier)}` - ); + const namedImports = importDecl.getNamedImports(); + + namedImports.forEach((namedImport) => { const nameOrAliasNode = namedImport.getAliasNode() ?? namedImport; const importName = namedImport.getName(); const alias = namedImport.getAliasNode()?.getText() || importName; @@ -106,23 +106,25 @@ function processNamedImports(context: ImportContext): void { if (isKnownTokenPackage(context.moduleSpecifier, importName)) { // we have a direct token import, record it and move on. recordImport(context, alias, nameOrAliasNode); - addTemplateGroups(context.importedValues.get(alias)!); - } else { - const exportInfo: ExportInfo | undefined = resolveExport(context.importedFile, importName, typeChecker); + } else if (ts.isExternalModuleNameRelative(context.moduleSpecifier)) { + // We know it's not a direct token reference but it's a relative import, so it could contain + // token references and we need to do further processing. + + const exportInfo: ExportInfo | undefined = resolveExport(context.importedFile, importName, typeChecker, project); if (exportInfo) { recordImport(context, alias, nameOrAliasNode, exportInfo); - addTemplateGroups(context.importedValues.get(alias)!); + // addTemplateGroups(context.importedValues.get(alias)!); } } - } + }); } /** * Process default import using TypeScript's type checker */ function processDefaultImport(context: ImportContext): void { - const { importDecl, typeChecker } = context; + const { importDecl, typeChecker, project } = context; const defaultImport = importDecl.getDefaultImport(); if (!defaultImport) { @@ -133,13 +135,11 @@ function processDefaultImport(context: ImportContext): void { const importName = defaultImport.getText(); if (isKnownTokenPackage(context.moduleSpecifier)) { recordImport(context, importName, importDecl); - addTemplateGroups(context.importedValues.get(importName)!); } else { - const exportInfo: ExportInfo | undefined = resolveExport(context.importedFile, 'default', typeChecker); + const exportInfo: ExportInfo | undefined = resolveExport(context.importedFile, 'default', typeChecker, project); if (exportInfo) { recordImport(context, importName, defaultImport, exportInfo); - addTemplateGroups(context.importedValues.get(importName)!); } } } @@ -166,42 +166,19 @@ function processNamespaceImport(context: ImportContext): void { // Helper to record an import consistently function recordImport(ctx: ImportContext, alias: string, node: Node, exportInfo?: ExportInfo): void { - const { importedValues, moduleSpecifier, importedFile } = ctx; - const pkg = exportInfo?.moduleSpecifier ?? moduleSpecifier; - const name = exportInfo?.importExportSpecifierName ?? alias; + const { importedValues, importedFile } = ctx; // Only record known token imports - if (isKnownTokenPackage(pkg, name)) { - const source = exportInfo?.sourceFile ?? importedFile; - - // Use actual token literal when available - const importValue = exportInfo?.valueDeclarationValue ?? 
alias; - importedValues.set(alias, { - value: importValue, - node, - sourceFile: source.getFilePath(), - declaredValue: exportInfo?.valueDeclarationValue, - declarationNode: exportInfo?.declaration, - }); - log(`Recorded token import: ${alias} from ${source.getFilePath()}`); - } -} - -// Helper to extract template groups if the declaration node is a template expression -function addTemplateGroups(imported: ImportedValue) { - const declNode = imported.declarationNode; - if (!declNode) { - return; - } - let exprNode; - if (Node.isVariableDeclaration(declNode)) { - exprNode = declNode.getInitializer(); - } else if (Node.isExportAssignment(declNode)) { - exprNode = declNode.getExpression(); - } - if (exprNode && Node.isTemplateExpression(exprNode)) { - imported.templateGroups = extractNodesFromTemplateStringLiteral(exprNode).extractedExpressions; - } + const source = exportInfo?.sourceFile ?? importedFile; + + // Use actual token literal when available + const importValue = exportInfo?.valueDeclarationValue ?? alias; + importedValues.set(alias, { + value: importValue, + node, + sourceFile: source.getFilePath(), + declaredValue: exportInfo?.valueDeclarationValue, + declarationNode: exportInfo?.declaration, + }); + log(`Recorded token import: ${alias} from ${source.getFilePath()}`); } - -// Local export-resolution functions moved to reexportResolver.ts diff --git a/packages/token-analyzer/src/reexportResolver.ts b/packages/token-analyzer/src/reexportResolver.ts index 6faea9fcf..118fee022 100644 --- a/packages/token-analyzer/src/reexportResolver.ts +++ b/packages/token-analyzer/src/reexportResolver.ts @@ -1,6 +1,7 @@ -import { Node, SourceFile, Symbol, TypeChecker, ImportDeclaration, SyntaxKind, ExportDeclaration } from 'ts-morph'; +import { Node, SourceFile, TypeChecker, SyntaxKind, ts, Project } from 'ts-morph'; import { log } from './debugUtils.js'; -import { knownTokenImportsAndModules } from './types.js'; +import { isKnownTokenPackage } from './tokenUtils'; +import { getModuleSourceFile } from './moduleResolver'; export interface ExportInfo { declaration: Node; @@ -10,111 +11,90 @@ export interface ExportInfo { valueDeclarationValue?: string; } -export function isKnownTokenPackage(moduleSpecifier: string, valueName?: string): boolean { - const keys = Object.keys(knownTokenImportsAndModules); - return ( - (valueName && keys.includes(valueName) && knownTokenImportsAndModules[valueName].includes(moduleSpecifier)) || - knownTokenImportsAndModules.default.includes(moduleSpecifier) - ); -} - // Resolves an export name to its original declaration, following aliases and re-exports export function resolveExport( sourceFile: SourceFile, exportName: string, - typeChecker: TypeChecker + typeChecker: TypeChecker, + project: Project ): ExportInfo | undefined { try { - // Handle named re-exports (export { foo as bar } from './module') - for (const exportDecl of sourceFile.getExportDeclarations()) { - const targetFile = exportDecl.getModuleSpecifierSourceFile(); - if (!targetFile) continue; - for (const specifier of exportDecl.getNamedExports()) { - const alias = specifier.getAliasNode()?.getText() ?? 
specifier.getNameNode().getText(); - const name = specifier.getNameNode().getText(); - if (alias === exportName) { - // Follow into target module with original name - return resolveExport(targetFile, name, typeChecker); - } - } - } - // Get module symbol const moduleSymbol = typeChecker.getSymbolAtLocation(sourceFile); - if (!moduleSymbol) return undefined; + if (!moduleSymbol) { + return undefined; + } + // Get all direct exports const exportsArr = typeChecker.getExportsOfModule(moduleSymbol); // Find direct export symbol - let symbol = exportsArr.find((s) => s.getName() === exportName); + const symbol = exportsArr.find((s) => s.getName() === exportName); + if (symbol) { + const exportSpecifier = symbol?.getDeclarations().find(Node.isExportSpecifier); + const varDeclaration = symbol?.getDeclarations().find(Node.isVariableDeclaration); - // Handle star re-exports (export * from 'module') - if (!symbol) { - for (const exportDecl of sourceFile.getExportDeclarations()) { - // include export * from. isNamespaceExport covers 'export * as', but also check if no namedExports - if (!exportDecl.isNamespaceExport() && exportDecl.getNamedExports().length === 0) { - const target = exportDecl.getModuleSpecifierSourceFile(); - if (target) { - const nested = resolveExport(target, exportName, typeChecker); - if (nested) return nested; - } - } - } - return undefined; - } - - // Walk alias chain and check for knownTokenPackage at each step - let currentSymbol: Symbol | undefined = symbol; - while (currentSymbol) { - const decls = currentSymbol.getDeclarations(); - for (const decl of decls) { - if (Node.isImportSpecifier(decl)) { - const importDecl = decl.getFirstAncestorByKind(SyntaxKind.ImportDeclaration) as ImportDeclaration; - if (importDecl) { - const moduleSpecifier = importDecl.getModuleSpecifierValue(); - const valueName = decl.getName(); - if (isKnownTokenPackage(moduleSpecifier, valueName)) { + if (varDeclaration) { + // if we have a simple variable declaration that points to a known token import, we can return it, + // if we have a template expression, we need to process with extractNodesFromTemplateStringLiteral + // and then determine if any of the nodes are known token packages. 
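+          // Illustrative sketch (hypothetical file contents, not from this repo):
+          //   import { ctrlLinkForegroundBrandHover } from '@fluentui/semantic-tokens';
+          //   export const hoverColor = ctrlLinkForegroundBrandHover;
+          // Resolving `hoverColor` follows the identifier initializer back to its import
+          // specifier; '@fluentui/semantic-tokens' is on the known-token list, so the export
+          // resolves to a token. Relative module specifiers instead recurse through
+          // getModuleSourceFile + resolveExport below.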
+ const initializer = varDeclaration.getInitializer(); + console.log(`getting the type of var declaration ${initializer?.getKindName()}`); + if (Node.isIdentifier(initializer)) { + const importSpecifier = initializer.getSymbol()?.getDeclarations().find(Node.isImportSpecifier); + if (importSpecifier) { + const specifierName = importSpecifier.getName(); + const importDeclaration = importSpecifier.getFirstAncestorByKind(SyntaxKind.ImportDeclaration); + const moduleSpecifier = importDeclaration?.getModuleSpecifierValue(); + if (moduleSpecifier !== undefined && isKnownTokenPackage(moduleSpecifier, specifierName)) { + // found a known token, process return { - declaration: decl, - sourceFile: importDecl.getSourceFile(), + declaration: importSpecifier, + sourceFile: importSpecifier.getSourceFile(), moduleSpecifier, - importExportSpecifierName: valueName, - valueDeclarationValue: valueName, + importExportSpecifierName: specifierName, + valueDeclarationValue: specifierName, }; + } else if (moduleSpecifier !== undefined && ts.isExternalModuleNameRelative(moduleSpecifier)) { + const moduleSourceFile = getModuleSourceFile(project, moduleSpecifier, sourceFile.getFilePath()); + if (moduleSourceFile) { + return resolveExport(moduleSourceFile, specifierName, typeChecker, project); + } + } + } + } else if (Node.isTemplateExpression(initializer)) { + console.log(`found template expression ${initializer.getText()}`); + } else if (Node.isPropertyAccessExpression(initializer)) { + console.log(`found property access ${initializer.getText()}`); + } + } + if (exportSpecifier) { + // If we have an export specifier, determine if we have a known token. + // If not, we need to find the module name and see if it's relative. If it is, we should recursively call this + // function to determine if there is a token. If it's not relative, we then should just end as the module + // isn't a token source. + const exportDeclaration = exportSpecifier.getFirstAncestorByKind(SyntaxKind.ExportDeclaration); + const exportSpecifierName = exportSpecifier.getName(); + if (exportDeclaration) { + const moduleSpecifier = exportDeclaration.getModuleSpecifierValue(); + if (moduleSpecifier !== undefined && isKnownTokenPackage(moduleSpecifier, exportSpecifierName)) { + // We found a known token source, return it + return { + declaration: exportSpecifier, + sourceFile: exportDeclaration.getSourceFile(), + moduleSpecifier, + importExportSpecifierName: exportSpecifierName, + valueDeclarationValue: exportSpecifierName, + }; + } else if (moduleSpecifier !== undefined && ts.isExternalModuleNameRelative(moduleSpecifier)) { + // We have a relative module specifier, we need to resolve it + const moduleSourceFile = getModuleSourceFile(project, moduleSpecifier, sourceFile.getFilePath()); + if (moduleSourceFile) { + return resolveExport(moduleSourceFile, exportSpecifierName, typeChecker, project); } } } } - if (!currentSymbol.isAlias()) break; - const aliased = typeChecker.getImmediatelyAliasedSymbol(currentSymbol) as Symbol; - if (!aliased || aliased === currentSymbol) break; - currentSymbol = aliased; - } - - // Fallback: use local declaration in this module - const decl = symbol.getValueDeclaration() ?? 
symbol.getDeclarations()[0]; - if (!decl) return undefined; - - const declSourceFile = decl.getSourceFile(); - const moduleSpecifier = declSourceFile.getFilePath(); - // Extract the value text (initializer or expression) for accurate token reference - let valueDeclarationValue: string | undefined; - if (Node.isVariableDeclaration(decl)) { - const init = decl.getInitializer(); - valueDeclarationValue = init?.getText(); - } else if (Node.isExportAssignment(decl)) { - const expr = decl.getExpression(); - valueDeclarationValue = expr.getText(); - } else { - valueDeclarationValue = decl.getText(); } - - return { - declaration: decl, - sourceFile: declSourceFile, - moduleSpecifier, - importExportSpecifierName: symbol.getName(), - valueDeclarationValue, - }; } catch (e) { log(`Error resolving export ${exportName} in ${sourceFile.getFilePath()}:`, e); return undefined; diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index 88764530d..b61bd6696 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -63,7 +63,7 @@ const processStringLiteral = (info: TokenResolverInfo): TokenRefe }; const processIdentifier = (info: TokenResolverInfo): TokenReference[] => { - const { node, parentName, path, tokens, isVariableReference, sourceFile, importedValues } = info; + const { node, parentName, path, tokens, sourceFile, importedValues } = info; let returnTokens = tokens.slice(); @@ -91,7 +91,6 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ path, }, returnTokens, - isVariableReference, sourceFile ); } @@ -105,7 +104,7 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ }; const processPropertyAccess = (info: TokenResolverInfo): TokenReference[] => { - const { node, parentName, path, tokens, isVariableReference, sourceFile } = info; + const { node, parentName, path, tokens, sourceFile } = info; const text = node.getText(); const isToken = isTokenReference(info); @@ -117,7 +116,6 @@ const processPropertyAccess = (info: TokenResolverInfo path, }, tokens, - isVariableReference, sourceFile ); } @@ -193,7 +191,7 @@ const processFocusCallExpression = (info: TokenResolverInfo): To }; const processCallExpression = (info: TokenResolverInfo): TokenReference[] => { - const { node, path, tokens, importedValues, isVariableReference, sourceFile } = info; + const { node, path, tokens, importedValues, sourceFile } = info; let returnTokens = tokens.slice(); // Process calls like shorthands.borderColor(tokens.color) @@ -217,7 +215,6 @@ const processCallExpression = (info: TokenResolverInfo): TokenRe path: path.concat(argument.property), }, returnTokens, - isVariableReference, sourceFile ); }); diff --git a/packages/token-analyzer/src/tokenUtils.ts b/packages/token-analyzer/src/tokenUtils.ts index 2593c1d4c..7f9c00e57 100644 --- a/packages/token-analyzer/src/tokenUtils.ts +++ b/packages/token-analyzer/src/tokenUtils.ts @@ -1,6 +1,6 @@ // tokenUtils.ts import { Symbol, SyntaxKind, Node, Expression } from 'ts-morph'; -import { TOKEN_REGEX, TokenReference, TokenResolverInfo } from './types.js'; +import { knownTokenImportsAndModules, TOKEN_REGEX, TokenReference, TokenResolverInfo } from './types.js'; import { shorthands } from '@griffel/react'; export function isTokenReference(info: TokenResolverInfo): boolean { @@ -183,7 +183,6 @@ export function getPropertiesForShorthand(functionName: string, args: Node[]): { export const addTokenToArray = ( tokensToAdd: TokenReference[] | 
TokenReference, target: TokenReference[], - isVariableReference?: boolean, sourceFile?: string ) => { // create new array without modifying the original array @@ -194,14 +193,12 @@ export const addTokenToArray = ( newArray.push( ...tokensToAdd.map((token) => ({ ...token, - ...(isVariableReference && { isVariableReference }), ...(sourceFile && { sourceFile }), })) ); } else { newArray.push({ ...tokensToAdd, - ...(isVariableReference && { isVariableReference }), ...(sourceFile && { sourceFile }), }); } @@ -209,3 +206,11 @@ export const addTokenToArray = ( // return array without modifying the original array return newArray; }; + +export function isKnownTokenPackage(moduleSpecifier: string, valueName?: string): boolean { + const keys = Object.keys(knownTokenImportsAndModules); + return ( + (valueName && keys.includes(valueName) && knownTokenImportsAndModules[valueName].includes(moduleSpecifier)) || + knownTokenImportsAndModules.default.includes(moduleSpecifier) + ); +} diff --git a/packages/token-analyzer/src/types.ts b/packages/token-analyzer/src/types.ts index 43dbe59f1..5531b8eaf 100644 --- a/packages/token-analyzer/src/types.ts +++ b/packages/token-analyzer/src/types.ts @@ -6,7 +6,6 @@ export interface TokenReference { property: string; token: string[]; path: string[]; - isVariableReference?: boolean; sourceFile?: string; } @@ -78,6 +77,5 @@ export interface TokenResolverInfo { tokens: TokenReference[]; importedValues: Map; project: Project; - isVariableReference?: boolean; sourceFile?: string; } From d1ad55bccfa5bc1fb1c95103ba4eeb6bbc7ba4fd Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 20 May 2025 02:01:08 -0700 Subject: [PATCH 56/75] update css test property recurse property access expressions and resolve exports --- .../src/__tests__/cssVarE2E.test.ts | 10 +++-- .../token-analyzer/src/reexportResolver.ts | 42 ++++++++++++++++++- 2 files changed, 48 insertions(+), 4 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts index 7ff77f4e8..58b460c18 100644 --- a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts +++ b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts @@ -239,7 +239,7 @@ describe('CSS Variable Cross-Module Resolution E2E', () => { // aliased and imported CSS var marginRight:someMargin, // aliased and imported CSS var with another level of indirection - marginRight:someOtherMargin + marginLeft:someOtherMargin } }); @@ -294,8 +294,12 @@ describe('CSS Variable Cross-Module Resolution E2E', () => { token: ['tokens.colorBrandPrimary'], }), expect.objectContaining({ - property: 'padding', - token: ['tokens.colorBrandSecondary'], + property: 'marginRight', + token: ['tokens.spacingHorizontalXXL'], + }), + expect.objectContaining({ + property: 'marginLeft', + token: ['tokens.spacingVerticalXXL'], }), ]) ); diff --git a/packages/token-analyzer/src/reexportResolver.ts b/packages/token-analyzer/src/reexportResolver.ts index 118fee022..2fa5b9462 100644 --- a/packages/token-analyzer/src/reexportResolver.ts +++ b/packages/token-analyzer/src/reexportResolver.ts @@ -60,11 +60,51 @@ export function resolveExport( return resolveExport(moduleSourceFile, specifierName, typeChecker, project); } } + } else { + // if we don't have an import specifier, we should check of there's another delcaration and then resolve that as well + // This couuld be a var that points to another var that points to a known token for example. 
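+            // Illustrative sketch of that case (hypothetical file; currently only logged, not resolved):
+            //   import { ctrlLinkForegroundBrandHover } from '@fluentui/semantic-tokens';
+            //   const base = ctrlLinkForegroundBrandHover;
+            //   export const hoverColor = base;
+            // `hoverColor` resolves to the local `base` variable, whose declaration is not an
+            // import specifier, so it falls through to the log below instead of resolving to the token.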
+ console.log(`no import specifier found for ${initializer.getText()}, it's a ${initializer.getKindName()}`); } } else if (Node.isTemplateExpression(initializer)) { console.log(`found template expression ${initializer.getText()}`); } else if (Node.isPropertyAccessExpression(initializer)) { - console.log(`found property access ${initializer.getText()}`); + console.log( + `found property access ${initializer.getText()}, expression ${initializer.getExpression().getText()}` + ); + const expressionSymbol = initializer.getExpression().getSymbol(); + const expressionImportSpecifier = expressionSymbol?.getDeclarations().find(Node.isImportSpecifier); + if (expressionImportSpecifier) { + const expressionSpecifierName = expressionImportSpecifier.getName(); + const expressionImportDeclaration = expressionImportSpecifier.getFirstAncestorByKind( + SyntaxKind.ImportDeclaration + ); + const expressionModuleSpecifier = expressionImportDeclaration?.getModuleSpecifierValue(); + if ( + expressionModuleSpecifier !== undefined && + isKnownTokenPackage(expressionModuleSpecifier, expressionSpecifierName) + ) { + // found a known token, process + return { + declaration: expressionImportSpecifier, + sourceFile: expressionImportSpecifier.getSourceFile(), + moduleSpecifier: expressionModuleSpecifier, + importExportSpecifierName: expressionSpecifierName, + valueDeclarationValue: initializer.getText(), + }; + } else if ( + expressionModuleSpecifier !== undefined && + ts.isExternalModuleNameRelative(expressionModuleSpecifier) + ) { + const moduleSourceFile = getModuleSourceFile( + project, + expressionModuleSpecifier, + sourceFile.getFilePath() + ); + if (moduleSourceFile) { + return resolveExport(moduleSourceFile, expressionSpecifierName, typeChecker, project); + } + } + } } } if (exportSpecifier) { From bddb0d203e413a99d439ed2ae9406b50c50644ed Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Tue, 20 May 2025 15:25:50 -0700 Subject: [PATCH 57/75] process template expression imports remove invalid test update css test --- .../src/__tests__/cssVarE2E.test.ts | 2 +- .../src/__tests__/reexportResolver.test.ts | 61 --------------- packages/token-analyzer/src/importAnalyzer.ts | 1 + .../token-analyzer/src/reexportResolver.ts | 76 ++++++++++++++++--- 4 files changed, 69 insertions(+), 71 deletions(-) delete mode 100644 packages/token-analyzer/src/__tests__/reexportResolver.test.ts diff --git a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts index 58b460c18..604260a99 100644 --- a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts +++ b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts @@ -141,7 +141,7 @@ describe('CSS Variable Token Extraction E2E', () => { expect(useStyles.nestedCssVar.tokens).toContainEqual( expect.objectContaining({ property: 'background', - token: ['tokens.colorBrandForeground2'], + token: ['ctrlLinkForegroundBrandHover', 'tokens.colorBrandForeground2'], }) ); diff --git a/packages/token-analyzer/src/__tests__/reexportResolver.test.ts b/packages/token-analyzer/src/__tests__/reexportResolver.test.ts deleted file mode 100644 index f8211b1bc..000000000 --- a/packages/token-analyzer/src/__tests__/reexportResolver.test.ts +++ /dev/null @@ -1,61 +0,0 @@ -/* eslint-disable @typescript-eslint/no-non-null-assertion */ -import { Project, ModuleKind } from 'ts-morph'; -import { resolveExport } from '../reexportResolver'; -describe('reexportResolver.resolveExport', () => { - let project: Project; - beforeEach(() => { - project = new 
Project({ - useInMemoryFileSystem: true, - skipFileDependencyResolution: true, - compilerOptions: { module: ModuleKind.CommonJS }, - }); - }); - - it('resolves a direct export in the same file', () => { - const file = project.createSourceFile('A.ts', `export const foo = 'bar';`); - const info = resolveExport(file, 'foo', project.getTypeChecker(), project); - expect(info).toBeDefined(); - expect(info!.sourceFile.getBaseName()).toBe('A.ts'); - expect(info!.declaration.getText()).toContain('foo'); - }); - - it('resolves a re-export from another module', () => { - project.createSourceFile('A.ts', `export const foo = 'baz';`); - const fileB = project.createSourceFile('B.ts', `export { foo } from './A';`); - const infoB = resolveExport(fileB, 'foo', project.getTypeChecker(), project); - expect(infoB).toBeDefined(); - expect(infoB!.sourceFile.getBaseName()).toBe('A.ts'); - expect(infoB!.declaration.getText()).toContain('foo'); - }); - - it('resolves a renamed re-export alias', () => { - project.createSourceFile('A.ts', `export const foo = 123;`); - const fileC = project.createSourceFile('C.ts', `export { foo as foo2 } from './A';`); - const infoC = resolveExport(fileC, 'foo2', project.getTypeChecker(), project); - expect(infoC).toBeDefined(); - expect(infoC!.sourceFile.getBaseName()).toBe('A.ts'); - expect(infoC!.importExportSpecifierName).toBe('foo'); - expect(infoC!.declaration.getText()).toContain('foo'); - }); - - it('resolves an export that re-exports an imported symbol', () => { - project.createSourceFile('A.ts', `export const foo = 42;`); - const file = project.createSourceFile( - 'B.ts', - ` - import { foo } from './A'; - export const test = foo; - ` - ); - const info = resolveExport(file, 'test', project.getTypeChecker(), project); - expect(info).toBeDefined(); - expect(info!.sourceFile.getBaseName()).toBe('A.ts'); - expect(info!.declaration.getText()).toContain('foo'); - }); - - it('returns undefined for missing export', () => { - const file = project.createSourceFile('X.ts', `export const a = 1;`); - const info = resolveExport(file, 'nope', project.getTypeChecker(), project); - expect(info).toBeUndefined(); - }); -}); diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index 7a3057cc9..5887143ef 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -179,6 +179,7 @@ function recordImport(ctx: ImportContext, alias: string, node: Node, exportInfo? 
sourceFile: source.getFilePath(), declaredValue: exportInfo?.valueDeclarationValue, declarationNode: exportInfo?.declaration, + templateGroups: exportInfo?.templateGroups, }); log(`Recorded token import: ${alias} from ${source.getFilePath()}`); } diff --git a/packages/token-analyzer/src/reexportResolver.ts b/packages/token-analyzer/src/reexportResolver.ts index 2fa5b9462..7a5af7573 100644 --- a/packages/token-analyzer/src/reexportResolver.ts +++ b/packages/token-analyzer/src/reexportResolver.ts @@ -1,7 +1,8 @@ -import { Node, SourceFile, TypeChecker, SyntaxKind, ts, Project } from 'ts-morph'; +import { Node, SourceFile, TypeChecker, SyntaxKind, ts, Project, ImportSpecifier } from 'ts-morph'; import { log } from './debugUtils.js'; import { isKnownTokenPackage } from './tokenUtils'; import { getModuleSourceFile } from './moduleResolver'; +import { extractNodesFromTemplateStringLiteral } from './processTemplateStringLiteral'; export interface ExportInfo { declaration: Node; @@ -9,6 +10,7 @@ export interface ExportInfo { moduleSpecifier: string; importExportSpecifierName?: string; valueDeclarationValue?: string; + templateGroups?: Node[][]; } // Resolves an export name to its original declaration, following aliases and re-exports @@ -38,7 +40,7 @@ export function resolveExport( // if we have a template expression, we need to process with extractNodesFromTemplateStringLiteral // and then determine if any of the nodes are known token packages. const initializer = varDeclaration.getInitializer(); - console.log(`getting the type of var declaration ${initializer?.getKindName()}`); + log(`getting the type of var declaration ${initializer?.getKindName()}`); if (Node.isIdentifier(initializer)) { const importSpecifier = initializer.getSymbol()?.getDeclarations().find(Node.isImportSpecifier); if (importSpecifier) { @@ -61,16 +63,32 @@ export function resolveExport( } } } else { - // if we don't have an import specifier, we should check of there's another delcaration and then resolve that as well - // This couuld be a var that points to another var that points to a known token for example. - console.log(`no import specifier found for ${initializer.getText()}, it's a ${initializer.getKindName()}`); + // if we don't have an import specifier, we should check of there's another declaration and then resolve that as well + // This could be a var that points to another var that points to a known token for example. + // Since we haven't encountered this scenario yet, we'll leave this as a log entry + log(`no import specifier found for ${initializer.getText()}, it's a ${initializer.getKindName()}`); } } else if (Node.isTemplateExpression(initializer)) { - console.log(`found template expression ${initializer.getText()}`); + const templates = extractNodesFromTemplateStringLiteral(initializer); + const filteredExpressions = templates.extractedExpressions + .map((group) => { + return group.filter((node) => isNodeToken(node, typeChecker, project)); + }) + .filter((group) => group.length > 0); + if (filteredExpressions.length > 0) { + return { + declaration: initializer, + sourceFile: initializer.getSourceFile(), + moduleSpecifier: '', + importExportSpecifierName: '', + valueDeclarationValue: initializer.getText(), + templateGroups: filteredExpressions, + }; + } + // from here we should filter the nodes to see if any of them are known token packages and then return the groups if they are still present. + // We'll need to filter each node group and then if no nodes in that group are found, we should remove the group. 
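+        // Illustrative sketch (hypothetical file, assuming extractNodesFromTemplateStringLiteral
+        // groups the `${...}` expressions of the template):
+        //   import { tokens } from '@fluentui/react-theme';
+        //   export const hoverBackground = `var(--ctrlLinkForegroundBrandHover, ${tokens.colorBrandForeground2})`;
+        // The `${tokens.colorBrandForeground2}` expression resolves back to the known `tokens`
+        // import, so its group survives the filter above and is returned via templateGroups.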
} else if (Node.isPropertyAccessExpression(initializer)) { - console.log( - `found property access ${initializer.getText()}, expression ${initializer.getExpression().getText()}` - ); + log(`found property access ${initializer.getText()}, expression ${initializer.getExpression().getText()}`); const expressionSymbol = initializer.getExpression().getSymbol(); const expressionImportSpecifier = expressionSymbol?.getDeclarations().find(Node.isImportSpecifier); if (expressionImportSpecifier) { @@ -140,3 +158,43 @@ export function resolveExport( return undefined; } } + +// Helper to avoid duplicating logic for property access and identifier token checks +function checkImportSpecifier( + importSpecifier: ImportSpecifier, + checker: TypeChecker, + project: Project, + sourceFilePath: string +): boolean | undefined { + const importDeclaration = importSpecifier.getFirstAncestorByKind(SyntaxKind.ImportDeclaration); + const moduleSpecifier = importDeclaration?.getModuleSpecifierValue(); + const specifierName = importSpecifier.getName(); + if (moduleSpecifier !== undefined && isKnownTokenPackage(moduleSpecifier, specifierName)) { + return true; + } else if (moduleSpecifier !== undefined && ts.isExternalModuleNameRelative(moduleSpecifier)) { + const moduleSourceFile = getModuleSourceFile(project, moduleSpecifier, sourceFilePath); + if (moduleSourceFile) { + // If we have a relative module specifier, we need to resolve it and check if there's a token + console.log( + 'resolver info', + resolveExport(moduleSourceFile, specifierName, checker, project)?.valueDeclarationValue + ); + return !!resolveExport(moduleSourceFile, specifierName, checker, project); + } + } +} + +const isNodeToken = (node: Node, checker: TypeChecker, project: Project): boolean | undefined => { + // Handle property access or identifier uniformly + let importSpecifier; + if (Node.isPropertyAccessExpression(node)) { + const symbol = checker.getSymbolAtLocation(node.getExpression()); + importSpecifier = symbol?.getDeclarations().find(Node.isImportSpecifier); + } else if (Node.isIdentifier(node)) { + const symbol = checker.getSymbolAtLocation(node); + importSpecifier = symbol?.getDeclarations().find(Node.isImportSpecifier); + } + if (importSpecifier) { + return checkImportSpecifier(importSpecifier, checker, project, node.getSourceFile().getFilePath()); + } +}; From 3b2e1806db06c7c9f72f1a78303978e8a44d788b Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 21 May 2025 01:22:31 -0700 Subject: [PATCH 58/75] Fixing deeply nested template expression values --- .../src/__tests__/cssVarE2E.test.ts | 2 +- packages/token-analyzer/src/importAnalyzer.ts | 7 +++- .../token-analyzer/src/reexportResolver.ts | 35 +++++++++++++------ packages/token-analyzer/src/tokenResolver.ts | 4 +-- 4 files changed, 34 insertions(+), 14 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts index 604260a99..e5616d9f0 100644 --- a/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts +++ b/packages/token-analyzer/src/__tests__/cssVarE2E.test.ts @@ -291,7 +291,7 @@ describe('CSS Variable Cross-Module Resolution E2E', () => { // Multiple tokens from a complex var expect.objectContaining({ property: 'padding', - token: ['tokens.colorBrandPrimary'], + token: ['tokens.colorBrandPrimary', 'tokens.colorBrandSecondary'], }), expect.objectContaining({ property: 'marginRight', diff --git a/packages/token-analyzer/src/importAnalyzer.ts b/packages/token-analyzer/src/importAnalyzer.ts index 
5887143ef..dc21c7d77 100644 --- a/packages/token-analyzer/src/importAnalyzer.ts +++ b/packages/token-analyzer/src/importAnalyzer.ts @@ -5,6 +5,11 @@ import { resolveExport, ExportInfo } from './reexportResolver'; import { getModuleSourceFile } from './moduleResolver.js'; import { isKnownTokenPackage } from './tokenUtils'; +export interface TemplateGroupItem { + node: Node; + actualTokenValue?: string; +} + /** * Represents a value imported from another module */ @@ -14,7 +19,7 @@ export interface ImportedValue { node: Node; declaredValue?: string; declarationNode?: Node; - templateGroups?: Node[][]; + templateGroups?: TemplateGroupItem[][]; } // Context passed through each import handler for clearer signature diff --git a/packages/token-analyzer/src/reexportResolver.ts b/packages/token-analyzer/src/reexportResolver.ts index 7a5af7573..2de9a9128 100644 --- a/packages/token-analyzer/src/reexportResolver.ts +++ b/packages/token-analyzer/src/reexportResolver.ts @@ -3,6 +3,7 @@ import { log } from './debugUtils.js'; import { isKnownTokenPackage } from './tokenUtils'; import { getModuleSourceFile } from './moduleResolver'; import { extractNodesFromTemplateStringLiteral } from './processTemplateStringLiteral'; +import { TemplateGroupItem } from './importAnalyzer'; export interface ExportInfo { declaration: Node; @@ -10,7 +11,7 @@ export interface ExportInfo { moduleSpecifier: string; importExportSpecifierName?: string; valueDeclarationValue?: string; - templateGroups?: Node[][]; + templateGroups?: TemplateGroupItem[][]; } // Resolves an export name to its original declaration, following aliases and re-exports @@ -72,7 +73,17 @@ export function resolveExport( const templates = extractNodesFromTemplateStringLiteral(initializer); const filteredExpressions = templates.extractedExpressions .map((group) => { - return group.filter((node) => isNodeToken(node, typeChecker, project)); + const newGroup: TemplateGroupItem[] = []; + group.forEach((node) => { + const nodeInfo = isNodeToken(node, typeChecker, project); + if (nodeInfo?.isToken) { + newGroup.push({ + node, + actualTokenValue: nodeInfo.declarationValue, + }); + } + }); + return newGroup; }) .filter((group) => group.length > 0); if (filteredExpressions.length > 0) { @@ -165,26 +176,30 @@ function checkImportSpecifier( checker: TypeChecker, project: Project, sourceFilePath: string -): boolean | undefined { +): { isToken: boolean; declarationValue?: string } | undefined { const importDeclaration = importSpecifier.getFirstAncestorByKind(SyntaxKind.ImportDeclaration); const moduleSpecifier = importDeclaration?.getModuleSpecifierValue(); const specifierName = importSpecifier.getName(); if (moduleSpecifier !== undefined && isKnownTokenPackage(moduleSpecifier, specifierName)) { - return true; + return { isToken: true }; } else if (moduleSpecifier !== undefined && ts.isExternalModuleNameRelative(moduleSpecifier)) { const moduleSourceFile = getModuleSourceFile(project, moduleSpecifier, sourceFilePath); if (moduleSourceFile) { // If we have a relative module specifier, we need to resolve it and check if there's a token - console.log( - 'resolver info', - resolveExport(moduleSourceFile, specifierName, checker, project)?.valueDeclarationValue - ); - return !!resolveExport(moduleSourceFile, specifierName, checker, project); + // If there's a declaration value we should also return that so we don't falsely return the variable name + const resolverInfo = resolveExport(moduleSourceFile, specifierName, checker, project); + if (resolverInfo) { + return { isToken: 
true, declarationValue: resolverInfo.valueDeclarationValue }; + } } } } -const isNodeToken = (node: Node, checker: TypeChecker, project: Project): boolean | undefined => { +const isNodeToken = ( + node: Node, + checker: TypeChecker, + project: Project +): { isToken: boolean; declarationValue?: string } | undefined => { // Handle property access or identifier uniformly let importSpecifier; if (Node.isPropertyAccessExpression(node)) { diff --git a/packages/token-analyzer/src/tokenResolver.ts b/packages/token-analyzer/src/tokenResolver.ts index b61bd6696..affdd657a 100644 --- a/packages/token-analyzer/src/tokenResolver.ts +++ b/packages/token-analyzer/src/tokenResolver.ts @@ -72,12 +72,12 @@ const processIdentifier = (info: TokenResolverInfo): TokenReference[ if (isTokenReference(info)) { const propertyName = path[path.length - 1] ?? parentName; const importedVal = importedValues.get(text)!; + // our template groups are already processed and we know they are known tokens so we can just add them if (importedVal.templateGroups && importedVal.templateGroups.length > 0) { importedVal.templateGroups.forEach((group) => { const grouped: TokenReference = { property: propertyName, token: [], path }; group.forEach((exprNode) => { - const nestedTokens = resolveToken({ ...info, tokens: [], node: exprNode }); - nestedTokens.forEach((t) => grouped.token.push(...t.token)); + grouped.token.push(exprNode.actualTokenValue ?? exprNode.node.getText()); }); if (grouped.token.length > 0) { returnTokens.push(grouped); From 8fe96a8ab2ddd4bf03254465a0d961409ca82dfd Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 21 May 2025 01:44:00 -0700 Subject: [PATCH 59/75] fixing default export analysis --- packages/token-analyzer/src/reexportResolver.ts | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/packages/token-analyzer/src/reexportResolver.ts b/packages/token-analyzer/src/reexportResolver.ts index 2de9a9128..646d16e7a 100644 --- a/packages/token-analyzer/src/reexportResolver.ts +++ b/packages/token-analyzer/src/reexportResolver.ts @@ -35,6 +35,7 @@ export function resolveExport( if (symbol) { const exportSpecifier = symbol?.getDeclarations().find(Node.isExportSpecifier); const varDeclaration = symbol?.getDeclarations().find(Node.isVariableDeclaration); + const exportAssignment = symbol?.getDeclarations().find(Node.isExportAssignment); if (varDeclaration) { // if we have a simple variable declaration that points to a known token import, we can return it, @@ -163,6 +164,19 @@ export function resolveExport( } } } + if (exportAssignment) { + const exportExpression = exportAssignment.getExpression(); + const tokenInfo = isNodeToken(exportExpression, typeChecker, project); + if (tokenInfo?.isToken) { + return { + declaration: exportAssignment, + sourceFile: exportAssignment.getSourceFile(), + moduleSpecifier: '', + importExportSpecifierName: exportName, + valueDeclarationValue: tokenInfo.declarationValue ?? 
exportExpression.getText(), + }; + } + } } } catch (e) { log(`Error resolving export ${exportName} in ${sourceFile.getFilePath()}:`, e); From f20c4012c10c36b5f523cff8c8675054d1ed7a29 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 21 May 2025 12:51:14 -0700 Subject: [PATCH 60/75] fixing tsc error --- packages/token-analyzer/src/astAnalyzer.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/token-analyzer/src/astAnalyzer.ts b/packages/token-analyzer/src/astAnalyzer.ts index d72275163..80ed6dd71 100644 --- a/packages/token-analyzer/src/astAnalyzer.ts +++ b/packages/token-analyzer/src/astAnalyzer.ts @@ -160,10 +160,12 @@ function createStyleContent(tokens: TokenReference[]): StyleContent { // this is because if we have more than a single item in our path left there's another level if (token.path.length - 1 - pathIndex > 1) { // Create a nested structure through a recursive call - if (!currentLevel[nestedKey].nested) { - currentLevel[nestedKey].nested = {}; + let cuurrentLevel = currentLevel[nestedKey].nested; + if (!cuurrentLevel) { + cuurrentLevel = {}; } - createNestedStructure(token, pathIndex + 1, currentLevel[nestedKey].nested); + currentLevel[nestedKey].nested = cuurrentLevel; + createNestedStructure(token, pathIndex + 1, cuurrentLevel); } else { currentLevel[nestedKey].tokens.push({ ...token, From a195c30fb83e038c416c509b74986b0d7cf7c6d5 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 21 May 2025 18:33:31 -0700 Subject: [PATCH 61/75] adding fixes for running as CLI exclude storybook files (there shouldn't be any anyways) --- packages/token-analyzer/package.json | 5 +++- packages/token-analyzer/src/index.ts | 28 ++++++++++------------- packages/token-analyzer/tsconfig.lib.json | 6 ++++- 3 files changed, 21 insertions(+), 18 deletions(-) diff --git a/packages/token-analyzer/package.json b/packages/token-analyzer/package.json index eac770586..76c24ee00 100644 --- a/packages/token-analyzer/package.json +++ b/packages/token-analyzer/package.json @@ -10,9 +10,12 @@ "@griffel/react": "^1.5.22" }, "scripts": { - "analyze-tokens": "NODE_OPTIONS=\"--loader ts-node/esm\" ts-node-esm src/index.ts", + "analyze-tokens": "(cd ../.. 
&& nx build token-analyzer) && node ../../dist/packages/token-analyzer/lib-commonjs/index.js", "test": "jest", "test:debug": "node --loader ts-node/esm --inspect-brk node_modules/.bin/jest --runInBand" }, + "bin": { + "token-analyzer": "./index.js" + }, "private": true } diff --git a/packages/token-analyzer/src/index.ts b/packages/token-analyzer/src/index.ts index 4ea3a3756..a216d218a 100644 --- a/packages/token-analyzer/src/index.ts +++ b/packages/token-analyzer/src/index.ts @@ -1,3 +1,4 @@ +#!/usr/bin/env node import { Project } from 'ts-morph'; import { promises as fs } from 'fs'; import { relative } from 'path'; @@ -22,14 +23,12 @@ async function analyzeProjectStyles( const results: AnalysisResults = {}; try { - const styleFiles = await measureAsync('find style files', () => - findStyleFiles(rootDir) - ); + const styleFiles = await measureAsync('find style files', () => findStyleFiles(rootDir)); console.log(`Found ${styleFiles.length} style files to analyze`); const project = new Project({ // Get the nearest tsconfig.json file so we can resolve modules and paths correctly based on the project config - tsConfigFilePath: findTsConfigPath() || '', + tsConfigFilePath: findTsConfigPath(rootDir) || '', skipAddingFilesFromTsConfig: true, skipFileDependencyResolution: false, }); @@ -53,17 +52,14 @@ async function analyzeProjectStyles( if (outputFile) { await measureAsync('write output file', async () => { - const formatted = format( - JSON.stringify(sortObjectByKeys(results), null, 2), - { - parser: 'json', - printWidth: 120, - tabWidth: 2, - singleQuote: true, - trailingComma: 'all', - arrowParens: 'avoid', - } - ); + const formatted = format(JSON.stringify(sortObjectByKeys(results), null, 2), { + parser: 'json', + printWidth: 120, + tabWidth: 2, + singleQuote: true, + trailingComma: 'all', + arrowParens: 'avoid', + }); await fs.writeFile(outputFile, formatted, 'utf8'); console.log(`Analysis written to ${outputFile}`); }); @@ -107,7 +103,7 @@ function countTokens(analysis: FileAnalysis): number { } // CLI execution -const isRunningDirectly = process.argv[1].endsWith('index.ts'); +const isRunningDirectly = process.argv[1].includes('index'); if (isRunningDirectly) { const rootDir = process.argv[2] || '../..'; const outputFile = process.argv[3] || './output.json'; diff --git a/packages/token-analyzer/tsconfig.lib.json b/packages/token-analyzer/tsconfig.lib.json index 884038eac..590b0063b 100644 --- a/packages/token-analyzer/tsconfig.lib.json +++ b/packages/token-analyzer/tsconfig.lib.json @@ -9,6 +9,10 @@ "jest.config.ts", "src/**/*.test.ts", "src/**/*.test.tsx", - "files/**" + "files/**", + "**/*.stories.ts", + "**/*.stories.js", + "**/*.stories.jsx", + "**/*.stories.tsx" ] } From ac53d2e5a660808474eaaf660f3542303d7473ac Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 22 May 2025 17:18:08 -0700 Subject: [PATCH 62/75] match defaults to documentation --- packages/token-analyzer/src/index.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/token-analyzer/src/index.ts b/packages/token-analyzer/src/index.ts index a216d218a..0ad0d8cbe 100644 --- a/packages/token-analyzer/src/index.ts +++ b/packages/token-analyzer/src/index.ts @@ -105,8 +105,8 @@ function countTokens(analysis: FileAnalysis): number { // CLI execution const isRunningDirectly = process.argv[1].includes('index'); if (isRunningDirectly) { - const rootDir = process.argv[2] || '../..'; - const outputFile = process.argv[3] || './output.json'; + const rootDir = process.argv[2] || './src'; + const 
outputFile = process.argv[3] || './token-analysis.json'; const debug = process.argv.includes('--debug'); const perf = process.argv.includes('--perf'); From a3558e7a95abe8b18dbbdf5f8331debe207ca5bd Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 22 May 2025 17:18:16 -0700 Subject: [PATCH 63/75] comment clean up --- packages/token-analyzer/src/types.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/token-analyzer/src/types.ts b/packages/token-analyzer/src/types.ts index 5531b8eaf..50f90e371 100644 --- a/packages/token-analyzer/src/types.ts +++ b/packages/token-analyzer/src/types.ts @@ -64,7 +64,6 @@ export type KnownTokenImportsAndModules = { export const knownTokenImportsAndModules: KnownTokenImportsAndModules = { // if we see any imports from the defaults, we assume it's a token. - // @fluentui/tokens is here as a test but should be removed in the future default: ['@fluentui/semantic-tokens'], // begin the known token imports tokens: ['@fluentui/react-theme', '@fluentui/react-components', '@fluentui/tokens'], From 82f944d9dcba985387a9c0714ee738bac9969222 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 22 May 2025 18:15:11 -0700 Subject: [PATCH 64/75] add yargs for better CLI experience update readme add deps and update yarn lock --- package.json | 4 +- packages/token-analyzer/README.md | 200 +++++++++------------------ packages/token-analyzer/package.json | 3 +- packages/token-analyzer/src/index.ts | 55 +++++++- yarn.lock | 189 +++++++++++++------------ 5 files changed, 218 insertions(+), 233 deletions(-) diff --git a/package.json b/package.json index e195f7db8..afecc4a59 100644 --- a/package.json +++ b/package.json @@ -77,6 +77,7 @@ "@types/react-dom": "18.3.0", "@types/react-virtualized-auto-sizer": "^1.0.1", "@types/react-window": "^1.8.5", + "@types/yargs": "^17.0.33", "beachball": "^2.33.2", "eslint": "9.26.0", "eslint-config-prettier": "10.1.5", @@ -105,7 +106,8 @@ "tslib": "^2.3.0", "typescript": "5.7.3", "typescript-eslint": "8.32.1", - "verdaccio": "6.1.2" + "verdaccio": "6.1.1", + "yargs": "^17.7.2" }, "dependencies": {}, "nx": { diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md index da659c6d6..ca8fd82b0 100644 --- a/packages/token-analyzer/README.md +++ b/packages/token-analyzer/README.md @@ -2,119 +2,111 @@ A static analysis tool that scans your project's style files to track and analyze design token usage. The analyzer helps identify where and how design tokens are being used across your codebase, making it easier to maintain consistency and track token adoption. The data from this tool can also be used to create other tools like theme designers. -## TODO +### How it works + +The tool first scans for the common pattern of `styles` or `style` being in the file name (along with common file extensions). From there it checks all imports to see if there are imports of known tokens. Currently, the list of known tokens and packages are internally maintained but we could easily expose this for extension libraries as well. See [knownTokenImportsAndModules](./src/types.ts#L65) in `types.ts` for the current list. This analysis isn't just done for direct imports but for any re-exports, new variable declarations, template expressions values, etc. This hopefully covers a wide range of scenarios the tool might encounter in code but it's possible there's more edge cases. Please report any issues you find so we can fix them and add new tests. 
Once this mapping is done, the tool scans for `makeStyles`, `makeResetStyles` and `mergeStyles` to build a comprehensive picture of what styles use which tokens, what meta data is considered when applying the styles and what properties they're applied to. As a result, this tool is targeted towards Griffel based styles for now. Since this tool works off the AST maps the usage of tokens and imports back to their symbols instead of just string analysis which we've found to be quite robust. Once analysis is complete, it outputs a JSON file with the mappings. By default it will produce a single analysis file for a given run. Multiple files are under an object key with their relative path within the JSON file. -- we also need to ensure var analysis is done correctly after the refactor -- Convert token member within the analysis output to an array so we can hold multiple tokens. The order should be the order or priority. [0] being the highest pri with the last item in the array the least prioritized. +## TODO - add config to point to custom prettier config for file output. - add tests for findTsConfigPath -- Update extractTokensFromText to find imported vars and tokens. We've updated it to resolve variable declarations thusfar but there's potential cases where we could have imports impact this as well. -- add tests for structure and output. ~~We're processing the styles but not putting them in the right places right now~~ +- Remove `extractTokensFromText` as we're only using it to help with `getPropertiesForShorthand`, we should leverage the existing analysis for this - update contributing doc with info about version management -- Data Flow - - find all styles files - - get all imports, analyze them for token references or values, return them to the main script flow - - process merge styles and map meta data to styles - - parse through each property of styles - - with each property,we should look at whether an initializer is there, spread, delaration etc and then determine if that's a token. if it is, we also need to see if there's a fallback chain and not just log a token but also log the tokens in the right order (this should also open the door to ensure we don't over complicate or duplicate logic here) - - The data flow complexity is a bit high currently and we should only recurse where we actually need to. - - property set in styles -> analyze type (expression call, initializer, declaration, etc) -> resolve given import information, type and rules -> Once we resolve, analyze if it's a token which should be a single call so we can centralize it -> return token with path, value, etc. This should include priority order if we have a var() fallback structure. -- we need to update isToken to resolve to which package/module it's imported from. - - - We also need to do this for shorthands - - We should write a function that does this from a node and follows it up the import chain. - - Update test to return promise instead of async/await function. -- We need to update import analyzer to handle namespace imports - -- when we resolve imports we store template string literal spans as part of the object but I don't thiink that we need to this. More importantly, we need to maintain a reference to the node for any additional processing, and then the individual initializers and other internal nodes. Clean this up before moving into the ordering as it'll be easier to deal with. We should also ensure this is consistent across template literals. - -- Switch to getImmediatelyAliasedSymbol from getAliasedSymbol. 
This is because if we have a direct export in a file like `export { someToken } from 'semantic-tokens` in a local file, we won't resolve to the local export location, but the library file, or a d.ts file. This isn't really what we need and we'd missed the actual import from `semantic-tokens` because of this and not correctly mark it. We'd be out of the boundary of our current application. This means we do need to manually walk but it shouldn't be overly complex. We should grab the immediate alias, see if we see a known token package, if not, walk again, etc until we can't anymore. From there if there isn't one we know it's not a token. +- Add ability to customize glob used to find style files +- Add ability to add known tokens +- Read gitignore from target dir and use that for ignore if we find one. (currently hard coded). -- handle when something has an import/export alias. Ex: `import { someToken as blah}`. In that case we have to look at `propertyName` and `name` to get the original and aliased names. - -ExportDeclaration +## Installation -## Notes +```bash +npm install --save-dev @fluentui-contrib/token-analyzer +``` -Get the symbol, find it's declaration, walk up the tree until we find the `ExportDeclaration`. From there grab the module specifier -compare that value to our known list -if it doesn't match, walk again, repeat -save the declaration +or -shortcut analysis if straight import is pointing to a known token package. If not, we can then analyze further with +```bash +yarn add @fluentui-contrib/token-analyzer -D +``` --_ get source file (useButtonStyles.styles.ts) -_ pull all import declarations -_ process each import declaration based on type (named, default, namespace) -_ get the name of each import (if named or namespace) -_ check if the import is from a known package right away -_ if so, we add it and move on +## Usage -- if not we process the file the module specifier resolves to (this could be a package index.ts or local file) (import-test.ts) - - we could have direct exports (re-exports) or we could have imports from other packages and then exports of those values. Either way the first thing we see should - be an export. We need to determine if the export is a full re-export or a declaration (extractValueFromDeclaration does this already). +### Command Line Interface -Since we correctly resolve colorNeutralForeground1, we need to then add a mapping that the symbol resolved back into a known token package. +Run the style analysis tool: -LET'S GET ODSPS AND TEAMS UXEs TO PULL DOWN THE NIGHTLY STUFF AND TRY TO USE SEMANTIC TOKENS ON THEIR CUSTOM COMPONENTS +```bash +npm run analyze-tokens [options] +``` -- could we get anyone to also look at calendar? talk to Jeff about if there's anyone that could take a look? 
+### Options -## Features +| Option | Alias | Type | Default | Description | +| ----------- | ----- | ------- | ----------------------- | ------------------------------------- | +| `--root` | `-r` | string | `./src` | Root directory to analyze | +| `--output` | `-o` | string | `./token-analysis.json` | Output file path for results | +| `--debug` | `-d` | boolean | `false` | Enable debug mode for verbose logging | +| `--perf` | `-p` | boolean | `false` | Enable performance tracking | +| `--help` | `-h` | - | - | Show help information | +| `--version` | - | - | - | Show version number | -- Scans TypeScript/JavaScript style files for token usage -- Tracks both direct token references and variables that reference tokens -- Follows imports to resolve token references across files -- Generates detailed JSON reports of token usage -- Performance tracking and debugging capabilities -- Handles nested style objects and property assignments +### Examples -## Installation +**Basic usage (uses defaults):** ```bash -npm install --save-dev @fluentui-contrib/token-analyzer +npm run analyze-tokens ``` -## Usage +**Custom directory and output:** -### Via CLI +```bash +npm run analyze-tokens -- --root ./components --output ./analysis-results.json +``` -The analyzer can be run from the command line: +**With debugging and performance tracking:** ```bash -npm run analyze-tokens -- [sourceDir] [outputFile] [flags] +npm run analyze-tokens -- --root ./src/components --debug --perf ``` -#### Arguments: +### Getting Help -- `sourceDir`: Directory to analyze (default: `./src`) -- `outputFile`: Output JSON file path (default: `./token-analysis.json`) +View all available options and examples: -#### Flags: - -- `--debug`: Enable debug logging -- `--perf`: Enable performance metrics +```bash +npm run analyze-tokens --help +# or +npm run analyze-tokens -h +``` -Examples: +View version information: ```bash -# Analyze src directory with default output -npm run analyze-tokens - -# Analyze specific directory with custom output -npm run analyze-tokens -- ./components ./analysis.json +npm run analyze-tokens --version +``` -# Run with debug logging -npm run analyze-tokens -- --debug +### Output -# Run with performance metrics -npm run analyze-tokens -- --perf +The tool will display progress information and a summary: -# Run with both debug and performance tracking -npm run analyze-tokens -- --debug --perf ``` +Starting analysis of ./src +Output will be written to ./token-analysis.json +Debug mode enabled +Performance tracking enabled + +Analysis complete! +Processed 23 files containing styles +Found 156 token references +``` + +Results are saved as JSON to the specified output file, containing detailed analysis of each file's style usage and token references. ### Programmatic Usage ```typescript -import { analyzeProjectStyles } from '@your-org/token-analyzer'; +import { analyzeProjectStyles } from '@fluentui-contrib/token-analyzer'; async function analyze() { const results = await analyzeProjectStyles('./src', './analysis.json', { @@ -139,56 +131,9 @@ Debug and performance tracking can be configured via: 1. CLI flags (as shown above) 2. Programmatic options when calling `analyzeProjectStyles` -3. 
Environment variables: - - `TOKEN_ANALYZER_DEBUG=true` - - `TOKEN_ANALYZER_PERF=true` - -## Output Format - -The analyzer generates a JSON file with the following structure: - -```typescript -{ - "path/to/file.ts": { - "styleName": { - "tokens": [ - { - "property": "color", - "token": "tokens.colors.primary", - "fromVariable": false // true if reference comes from a variable - } - ], - "nested": { - "hover": { - "tokens": [ - { - "property": "backgroundColor", - "token": "tokens.colors.secondary", - "fromVariable": true, - "sourceFile": "path/to/variables.ts" // only present for variable references - } - ] - } - } - } - } -} -``` ## Development -### Project Structure - -``` -src/ - ├── index.ts # Main entry point - ├── astAnalyzer.ts # AST analysis logic - ├── fileOperations.ts # File handling utilities - ├── formatter.ts # Output formatting - ├── debugUtils.ts # Debug and performance utilities - └── types.ts # TypeScript type definitions -``` - ### Running Tests ```bash @@ -201,23 +146,6 @@ npm test npm run build ``` -## Pending Improvements - -- [ ] Add more granular performance metrics -- [ ] Implement different levels of debug logging -- [ ] Add output format customization -- [ ] Add parallel processing options -- [ ] Add token pattern customization -- [ ] Add file pattern customization - -## Contributing - -1. Fork the repository -2. Create your feature branch (`git checkout -b feature/amazing-feature`) -3. Commit your changes (`git commit -m 'Add some amazing feature'`) -4. Push to the branch (`git push origin feature/amazing-feature`) -5. Open a Pull Request - ## License This project is licensed under the MIT License - see the LICENSE file for details. diff --git a/packages/token-analyzer/package.json b/packages/token-analyzer/package.json index 76c24ee00..a4283e2be 100644 --- a/packages/token-analyzer/package.json +++ b/packages/token-analyzer/package.json @@ -7,7 +7,8 @@ "ts-morph": "^24.0.0", "typescript": "5.7.3", "prettier": "^2.6.2", - "@griffel/react": "^1.5.22" + "@griffel/react": "^1.5.22", + "yargs": "^17.7.2" }, "scripts": { "analyze-tokens": "(cd ../.. 
&& nx build token-analyzer) && node ../../dist/packages/token-analyzer/lib-commonjs/index.js", diff --git a/packages/token-analyzer/src/index.ts b/packages/token-analyzer/src/index.ts index 0ad0d8cbe..f1857d741 100644 --- a/packages/token-analyzer/src/index.ts +++ b/packages/token-analyzer/src/index.ts @@ -8,6 +8,8 @@ import { analyzeFile } from './astAnalyzer.js'; import { AnalysisResults, FileAnalysis } from './types.js'; import { configure, log, error, measureAsync } from './debugUtils.js'; import { findTsConfigPath } from './findTsConfigPath.js'; +import { hideBin } from 'yargs/helpers'; +import yargs from 'yargs/yargs'; async function analyzeProjectStyles( rootDir: string, @@ -102,15 +104,60 @@ function countTokens(analysis: FileAnalysis): number { return count; } +// Define the expected CLI arguments interface +interface CliArgs { + root: string; + output: string; + debug: boolean; + perf: boolean; +} + // CLI execution const isRunningDirectly = process.argv[1].includes('index'); if (isRunningDirectly) { - const rootDir = process.argv[2] || './src'; - const outputFile = process.argv[3] || './token-analysis.json'; - const debug = process.argv.includes('--debug'); - const perf = process.argv.includes('--perf'); + const argv = yargs(hideBin(process.argv)) + .usage('$0 [options]', 'Analyze project styles and token usage') + .option('root', { + alias: 'r', + describe: 'Root directory to analyze', + type: 'string', + default: './src', + }) + .option('output', { + alias: 'o', + describe: 'Output file path', + type: 'string', + default: './token-analysis.json', + }) + .option('debug', { + alias: 'd', + describe: 'Enable debug mode', + type: 'boolean', + default: false, + }) + .option('perf', { + alias: 'p', + describe: 'Enable performance tracking', + type: 'boolean', + default: false, + }) + .example('$0', 'Run with default settings') + .example('$0 --root ./components --output ./results.json', 'Analyze components directory') + .example('$0 -r ./src -o ./analysis.json --debug', 'Run with debug mode') + .help('h') + .alias('h', 'help') + .version() + .strict() + .parseSync() as CliArgs; + + const { root: rootDir, output: outputFile, debug, perf } = argv; console.log(`Starting analysis of ${rootDir}`); + console.log(`Output will be written to ${outputFile}`); + + if (debug) console.log('Debug mode enabled'); + if (perf) console.log('Performance tracking enabled'); + analyzeProjectStyles(rootDir, outputFile, { debug, perf }) .then((results) => { const totalFiles = Object.keys(results).length; diff --git a/yarn.lock b/yarn.lock index 70127f0d0..419491414 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6290,6 +6290,13 @@ dependencies: "@types/yargs-parser" "*" +"@types/yargs@^17.0.33": + version "17.0.33" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.33.tgz#8c32303da83eec050a84b3c7ae7b9f922d13e32d" + integrity sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA== + dependencies: + "@types/yargs-parser" "*" + "@types/yargs@^17.0.8": version "17.0.24" resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.24.tgz#b3ef8d50ad4aa6aecf6ddc97c580a00f5aa11902" @@ -6378,19 +6385,19 @@ "@typescript-eslint/types" "8.32.1" eslint-visitor-keys "^4.2.0" -"@verdaccio/auth@8.0.0-next-8.15": - version "8.0.0-next-8.15" - resolved "https://registry.yarnpkg.com/@verdaccio/auth/-/auth-8.0.0-next-8.15.tgz#9516255e9c1a096c018e75137af683f8351921f9" - integrity 
sha512-vAfzGOHbPcPXMCI90jqm/qSZ1OUBnOGzudZA3+YtherncdwADekvXbdJlZVclcfmZ0sRbfVG5Xpf88aETiwfcw== +"@verdaccio/auth@8.0.0-next-8.14": + version "8.0.0-next-8.14" + resolved "https://registry.yarnpkg.com/@verdaccio/auth/-/auth-8.0.0-next-8.14.tgz#79caa7ffdf4119a0dcf7708cdb4514ed050bc694" + integrity sha512-OOuPVsfo1Y+/84pxS1KMSTtf5sc67PfNByzhBwC8cX4E1WiArHuP+rKfEqqCLu4NaN1BVNW4HiqCbZ1GzDyWtQ== dependencies: - "@verdaccio/config" "8.0.0-next-8.15" - "@verdaccio/core" "8.0.0-next-8.15" + "@verdaccio/config" "8.0.0-next-8.14" + "@verdaccio/core" "8.0.0-next-8.14" "@verdaccio/loaders" "8.0.0-next-8.6" - "@verdaccio/signature" "8.0.0-next-8.7" - "@verdaccio/utils" "8.1.0-next-8.15" + "@verdaccio/signature" "8.0.0-next-8.6" + "@verdaccio/utils" "8.1.0-next-8.14" debug "4.4.0" lodash "4.17.21" - verdaccio-htpasswd "13.0.0-next-8.15" + verdaccio-htpasswd "13.0.0-next-8.14" "@verdaccio/commons-api@10.2.0": version "10.2.0" @@ -6400,22 +6407,22 @@ http-errors "2.0.0" http-status-codes "2.2.0" -"@verdaccio/config@8.0.0-next-8.15": - version "8.0.0-next-8.15" - resolved "https://registry.yarnpkg.com/@verdaccio/config/-/config-8.0.0-next-8.15.tgz#de37c2e3c50bad236173d4c0adfbebc52f863fae" - integrity sha512-oEzQB+xeqaFAy54veMshqpt1hlZCYNkqoKuwkt7O8J43Fo/beiLluKUVneXckzi+pg1yvvGT7lNCbvuUQrxxQg== +"@verdaccio/config@8.0.0-next-8.14": + version "8.0.0-next-8.14" + resolved "https://registry.yarnpkg.com/@verdaccio/config/-/config-8.0.0-next-8.14.tgz#de9d2b45e56103bd2e4e7886952d781085a82f62" + integrity sha512-5YOiYfh9a0q+9ARU4NYnfl1YJunNKfyz1x2ONEhvmHkP11E782ep7ZtrJhgg/ZJTtOmjbCspgw6FN3WBe3WlLQ== dependencies: - "@verdaccio/core" "8.0.0-next-8.15" - "@verdaccio/utils" "8.1.0-next-8.15" + "@verdaccio/core" "8.0.0-next-8.14" + "@verdaccio/utils" "8.1.0-next-8.14" debug "4.4.0" js-yaml "4.1.0" lodash "4.17.21" minimatch "7.4.6" -"@verdaccio/core@8.0.0-next-8.15": - version "8.0.0-next-8.15" - resolved "https://registry.yarnpkg.com/@verdaccio/core/-/core-8.0.0-next-8.15.tgz#8041767fb7ecb844476f9c6f253b9536b1aa69af" - integrity sha512-d5r/ZSkCri7s1hvV35enptquV5LJ81NqMYJnsjuryIUnvwn1yaqLlcdd6zIL08unzCSr7qDdUAdwGRRm6PKzng== +"@verdaccio/core@8.0.0-next-8.14": + version "8.0.0-next-8.14" + resolved "https://registry.yarnpkg.com/@verdaccio/core/-/core-8.0.0-next-8.14.tgz#2f0e190e471065cd48ed1bdd0a6ec0bb58254986" + integrity sha512-qj4KLkaVfMzqhDFn9xqpRvJQglalmgSJig0h8BzgI/fXQWlhmC0JI2TP0cJRYEhALN3UqjDAQQHKtqa3FqlfSw== dependencies: ajv "8.17.1" core-js "3.40.0" @@ -6460,12 +6467,12 @@ lowdb "1.0.0" mkdirp "1.0.4" -"@verdaccio/logger-commons@8.0.0-next-8.15": - version "8.0.0-next-8.15" - resolved "https://registry.yarnpkg.com/@verdaccio/logger-commons/-/logger-commons-8.0.0-next-8.15.tgz#d15b66c2be2d287731fa88cbe3a87b035f35eae1" - integrity sha512-nF7VgBC2cl5ufv+mZEwBHHyZFb1F0+kVkuRMf3Tyk+Qp4lXilC9MRZ0oc+RnzsDbNmJ6IZHgHNbs6aJrNfaRGg== +"@verdaccio/logger-commons@8.0.0-next-8.14": + version "8.0.0-next-8.14" + resolved "https://registry.yarnpkg.com/@verdaccio/logger-commons/-/logger-commons-8.0.0-next-8.14.tgz#92c15a1b0f62fd74b16c541a29d57281e0e0f520" + integrity sha512-NMRnYg2tWOOvyds5JwjWCKuGWVC5wvK/PIp0pCYQKsB8SsTnBAy1/9XL2gGBSKRkYju7CFO50Q8lRwQMn6k48A== dependencies: - "@verdaccio/core" "8.0.0-next-8.15" + "@verdaccio/core" "8.0.0-next-8.14" "@verdaccio/logger-prettify" "8.0.0-next-8.2" colorette "2.0.20" debug "4.4.0" @@ -6481,23 +6488,23 @@ pino-abstract-transport "1.2.0" sonic-boom "3.8.1" -"@verdaccio/logger@8.0.0-next-8.15": - version "8.0.0-next-8.15" - resolved 
"https://registry.yarnpkg.com/@verdaccio/logger/-/logger-8.0.0-next-8.15.tgz#da0b3a95684bc20a37fd396f5a4692abb79d58bc" - integrity sha512-3gjhqvB87JUNDHFMN3YG4IweS9EgbCpAWZatNYzcoIWOoGiEaFQQBSM592CaFiI0yf8acyqWkNa1V95L1NMbRg== +"@verdaccio/logger@8.0.0-next-8.14": + version "8.0.0-next-8.14" + resolved "https://registry.yarnpkg.com/@verdaccio/logger/-/logger-8.0.0-next-8.14.tgz#ba2dd88207b17e20542710cd42ea0ae0c60f7eab" + integrity sha512-gddIKMRKDdtbnoCr9R9Yg38gD/lsydZCaehdaTVuolVU9y8iJLZtfjwjU0i2Jko8Qloc8p0CRifSnZhT4fiOug== dependencies: - "@verdaccio/logger-commons" "8.0.0-next-8.15" + "@verdaccio/logger-commons" "8.0.0-next-8.14" pino "9.6.0" -"@verdaccio/middleware@8.0.0-next-8.15": - version "8.0.0-next-8.15" - resolved "https://registry.yarnpkg.com/@verdaccio/middleware/-/middleware-8.0.0-next-8.15.tgz#48bf1cbf75973d775fc105d080d0e1b374ac03e6" - integrity sha512-xsCLGbnhqcYwE8g/u9wxNLfDcESpr9ptEZ8Ce7frVTphU7kYIL48QCDPMzug7U+AguNtCq4v4zcoY1PaOQ8mgw== +"@verdaccio/middleware@8.0.0-next-8.14": + version "8.0.0-next-8.14" + resolved "https://registry.yarnpkg.com/@verdaccio/middleware/-/middleware-8.0.0-next-8.14.tgz#2494707425cb0619cfa6251a05dc10d2b64beff9" + integrity sha512-ieYP4TJ2jaBR23NUDwYlVJYh9rLx3Zi3NdKIc5snbNY1IVMXd4O9j+Wl3FBwibZIvHbuXRRVFk3c32irPPu0KA== dependencies: - "@verdaccio/config" "8.0.0-next-8.15" - "@verdaccio/core" "8.0.0-next-8.15" - "@verdaccio/url" "13.0.0-next-8.15" - "@verdaccio/utils" "8.1.0-next-8.15" + "@verdaccio/config" "8.0.0-next-8.14" + "@verdaccio/core" "8.0.0-next-8.14" + "@verdaccio/url" "13.0.0-next-8.14" + "@verdaccio/utils" "8.1.0-next-8.14" debug "4.4.0" express "4.21.2" express-rate-limit "5.5.1" @@ -6510,12 +6517,12 @@ resolved "https://registry.yarnpkg.com/@verdaccio/search-indexer/-/search-indexer-8.0.0-next-8.4.tgz#2ee936cc9af406479bb1c58dae2a72c773861a2c" integrity sha512-Oea9m9VDqdlDPyQ9+fpcxZk0sIYH2twVK+YbykHpSYpjZRzz9hJfIr/uUwAgpWq83zAl2YDbz4zR3TjzjrWQig== -"@verdaccio/signature@8.0.0-next-8.7": - version "8.0.0-next-8.7" - resolved "https://registry.yarnpkg.com/@verdaccio/signature/-/signature-8.0.0-next-8.7.tgz#faebf6268831f6eaac51d7111eb164e0317c9f77" - integrity sha512-sqP+tNzUtVIwUtt1ZHwYoxsO3roDLK7GW8c8Hj0SNaON+9ele9z4NBhaor+g95zRuLy6xtw/RgOvpyLon/vPrA== +"@verdaccio/signature@8.0.0-next-8.6": + version "8.0.0-next-8.6" + resolved "https://registry.yarnpkg.com/@verdaccio/signature/-/signature-8.0.0-next-8.6.tgz#f840e034f143a9fde311b920368fd9b42e05198e" + integrity sha512-fIYC0mluIUmQBN47SYrZ1+I0UquSbAiGV5+n3ime4gyGJjDoDE28yB7BkqD869FOypFTjzzCMsxN0fuRU94UYg== dependencies: - "@verdaccio/config" "8.0.0-next-8.15" + "@verdaccio/config" "8.0.0-next-8.14" debug "4.4.0" jsonwebtoken "9.0.2" @@ -6524,40 +6531,40 @@ resolved "https://registry.yarnpkg.com/@verdaccio/streams/-/streams-10.2.1.tgz#9443d24d4f17672b8f8c8e147690557918ed2bcb" integrity sha512-OojIG/f7UYKxC4dYX8x5ax8QhRx1b8OYUAMz82rUottCuzrssX/4nn5QE7Ank0DUSX3C9l/HPthc4d9uKRJqJQ== -"@verdaccio/tarball@13.0.0-next-8.15": - version "13.0.0-next-8.15" - resolved "https://registry.yarnpkg.com/@verdaccio/tarball/-/tarball-13.0.0-next-8.15.tgz#9d043730dc36ecfd329501d22670480cdee76d8e" - integrity sha512-oSNmq7zD/iPIC5HpJbOJjW/lb0JV9k3jLwI6sG7kPgm+UIxVAOV4fKQOAD18HpHl/WjkF247NA6zGlAB94Habw== +"@verdaccio/tarball@13.0.0-next-8.14": + version "13.0.0-next-8.14" + resolved "https://registry.yarnpkg.com/@verdaccio/tarball/-/tarball-13.0.0-next-8.14.tgz#47cce274177b1bb7034c5cfd52bc221ab5485e34" + integrity 
sha512-7da3X1L+AsswI0RPOor3sWoLU4zl611Qgnms3mOBF+Y7pNwHF1KsrkXBM5aV/sxr0+FYsrU2bDrxHKg5aCWi0w== dependencies: - "@verdaccio/core" "8.0.0-next-8.15" - "@verdaccio/url" "13.0.0-next-8.15" - "@verdaccio/utils" "8.1.0-next-8.15" + "@verdaccio/core" "8.0.0-next-8.14" + "@verdaccio/url" "13.0.0-next-8.14" + "@verdaccio/utils" "8.1.0-next-8.14" debug "4.4.0" gunzip-maybe "^1.4.2" lodash "4.17.21" tar-stream "^3.1.7" -"@verdaccio/ui-theme@8.0.0-next-8.15": - version "8.0.0-next-8.15" - resolved "https://registry.yarnpkg.com/@verdaccio/ui-theme/-/ui-theme-8.0.0-next-8.15.tgz#4eac7390b94fd4b8578f7165062f559c083391fc" - integrity sha512-k9BAM7rvbUqB2JPReNgXKUVTzBkdmIrNw0f6/7uyO+9cp7eVuarrPBnVF0oMc7jzVNBZRCpUksrhMZ0KwDZTpw== +"@verdaccio/ui-theme@8.0.0-next-8.14": + version "8.0.0-next-8.14" + resolved "https://registry.yarnpkg.com/@verdaccio/ui-theme/-/ui-theme-8.0.0-next-8.14.tgz#2de0cf58879b085ceeb9213830707afa8a9a799f" + integrity sha512-BW61qb3EGIoZrzb2AhrAqnpuim5Me7xFCR8VQPgJIya7z/zVhd+b3t4gnaOCowPnXgGrAvIyWuhVg92hnDh9pQ== -"@verdaccio/url@13.0.0-next-8.15": - version "13.0.0-next-8.15" - resolved "https://registry.yarnpkg.com/@verdaccio/url/-/url-13.0.0-next-8.15.tgz#29c67bd865b7e362bf63cde74d3753d26a3ef418" - integrity sha512-1N/dGhw7cZMhupf/Xlm73beiL3oCaAiyo9DTumjF3aTcJnipVcT1hoj6CSj9RIX54824rUK9WVmo83dk0KPnjw== +"@verdaccio/url@13.0.0-next-8.14": + version "13.0.0-next-8.14" + resolved "https://registry.yarnpkg.com/@verdaccio/url/-/url-13.0.0-next-8.14.tgz#675d69a13e908cfe00c579c4e77e1479d3e18733" + integrity sha512-3DOPuaZAPSXh2tyYr0L6CSI9Gbd8qmWA5d4WtxH7yxVbRIEloiFOINBy4NaBZVtwfACANG22OtEd1mdvGjHcvA== dependencies: - "@verdaccio/core" "8.0.0-next-8.15" + "@verdaccio/core" "8.0.0-next-8.14" debug "4.4.0" lodash "4.17.21" validator "13.12.0" -"@verdaccio/utils@8.1.0-next-8.15": - version "8.1.0-next-8.15" - resolved "https://registry.yarnpkg.com/@verdaccio/utils/-/utils-8.1.0-next-8.15.tgz#d37d5d43ad8f5fded8cc1df6411d5babba6f461d" - integrity sha512-efg/bunOUMVXV+MlljJCrpuT+OQRrQS4wJyGL92B3epUGlgZ8DXs+nxN5v59v1a6AocAdSKwHgZS0g9txmBhOg== +"@verdaccio/utils@8.1.0-next-8.14": + version "8.1.0-next-8.14" + resolved "https://registry.yarnpkg.com/@verdaccio/utils/-/utils-8.1.0-next-8.14.tgz#904362ee83551efc7b2473ea32efef78e8ef5235" + integrity sha512-ylEpOi1JD4NJeLqSunABsnmqZOAXyS3H5s2ysHnA68TNvfV/CCZwhinRNyjxiedOzJeZhSowiWwFynBD/f6dew== dependencies: - "@verdaccio/core" "8.0.0-next-8.15" + "@verdaccio/core" "8.0.0-next-8.14" lodash "4.17.21" minimatch "7.4.6" semver "7.7.1" @@ -17181,23 +17188,23 @@ vary@^1, vary@^1.1.2, vary@~1.1.2: resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== -verdaccio-audit@13.0.0-next-8.15: - version "13.0.0-next-8.15" - resolved "https://registry.yarnpkg.com/verdaccio-audit/-/verdaccio-audit-13.0.0-next-8.15.tgz#1a759de5793169ca1c92a7d01a480d56c85513f5" - integrity sha512-Aeau0u0fi5l4PoSDyOV6glz2FDO9+ofvogJIELV4H6fhDXhgPc2MnoKuaUgOT//khESLle/a6YfcLY2/KNLs6g== +verdaccio-audit@13.0.0-next-8.14: + version "13.0.0-next-8.14" + resolved "https://registry.yarnpkg.com/verdaccio-audit/-/verdaccio-audit-13.0.0-next-8.14.tgz#2637f5fd71fa70a5ad6bee705ff633735f441c6b" + integrity sha512-btV62SbeBDtjdWsZN8RhzSE9ef89yYtdGWeRNSlYTwB/1qvQS//GhNWpUQvD83fPDUMrfo2o7tvmzIML6kOCNQ== dependencies: - "@verdaccio/config" "8.0.0-next-8.15" - "@verdaccio/core" "8.0.0-next-8.15" + "@verdaccio/config" "8.0.0-next-8.14" + "@verdaccio/core" 
"8.0.0-next-8.14" express "4.21.2" https-proxy-agent "5.0.1" node-fetch cjs -verdaccio-htpasswd@13.0.0-next-8.15: - version "13.0.0-next-8.15" - resolved "https://registry.yarnpkg.com/verdaccio-htpasswd/-/verdaccio-htpasswd-13.0.0-next-8.15.tgz#59cd659a45cba3a8f592943b32be8b9a7d4ddc11" - integrity sha512-rQg5oZ/rReDAM4g4W68hvtzReTbM6vduvVtobHsQxhbtbotEuUjP6O8uaROYtgZ60giGva5Tub2SOm2T9Ln9Dw== +verdaccio-htpasswd@13.0.0-next-8.14: + version "13.0.0-next-8.14" + resolved "https://registry.yarnpkg.com/verdaccio-htpasswd/-/verdaccio-htpasswd-13.0.0-next-8.14.tgz#02cc8294cf285db2ef63634ca18f156cc9f4fa82" + integrity sha512-JOXlm1g6MrZgOhzvgQfTNBmmstMJVTSiXmXbWCxSRjTzRX/v1WIdhwIWEzhn+lJUEWKv+1Z17dSSk6NiVZW/8Q== dependencies: - "@verdaccio/core" "8.0.0-next-8.15" + "@verdaccio/core" "8.0.0-next-8.14" "@verdaccio/file-locking" "13.0.0-next-8.3" apache-md5 "1.1.8" bcryptjs "2.4.3" @@ -17206,26 +17213,26 @@ verdaccio-htpasswd@13.0.0-next-8.15: http-errors "2.0.0" unix-crypt-td-js "1.1.4" -verdaccio@6.1.2: - version "6.1.2" - resolved "https://registry.yarnpkg.com/verdaccio/-/verdaccio-6.1.2.tgz#3862491cddc6bb127a2458e49da4412af5385407" - integrity sha512-HQCquycSQkA+tKRVqMjIVRzmhzTciLfScvKIhhiwZZ9Qd13e2KJQTOdB7QrSacfJuPpl94TA5EZ7XmVRQKk3ag== +verdaccio@6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/verdaccio/-/verdaccio-6.1.1.tgz#44de63550ff6aa6b81e287a312a974b01aa0be1b" + integrity sha512-kemqW6Y/VA2Z7I8Dn3seyMzZNlnIQaP8l9mtHKriW26fxlAD2K+9esSTn8gsZej038Z7sbqpy1RFJNOZ5BPr0w== dependencies: "@cypress/request" "3.0.8" - "@verdaccio/auth" "8.0.0-next-8.15" - "@verdaccio/config" "8.0.0-next-8.15" - "@verdaccio/core" "8.0.0-next-8.15" + "@verdaccio/auth" "8.0.0-next-8.14" + "@verdaccio/config" "8.0.0-next-8.14" + "@verdaccio/core" "8.0.0-next-8.14" "@verdaccio/loaders" "8.0.0-next-8.6" "@verdaccio/local-storage-legacy" "11.0.2" - "@verdaccio/logger" "8.0.0-next-8.15" - "@verdaccio/middleware" "8.0.0-next-8.15" + "@verdaccio/logger" "8.0.0-next-8.14" + "@verdaccio/middleware" "8.0.0-next-8.14" "@verdaccio/search-indexer" "8.0.0-next-8.4" - "@verdaccio/signature" "8.0.0-next-8.7" + "@verdaccio/signature" "8.0.0-next-8.6" "@verdaccio/streams" "10.2.1" - "@verdaccio/tarball" "13.0.0-next-8.15" - "@verdaccio/ui-theme" "8.0.0-next-8.15" - "@verdaccio/url" "13.0.0-next-8.15" - "@verdaccio/utils" "8.1.0-next-8.15" + "@verdaccio/tarball" "13.0.0-next-8.14" + "@verdaccio/ui-theme" "8.0.0-next-8.14" + "@verdaccio/url" "13.0.0-next-8.14" + "@verdaccio/utils" "8.1.0-next-8.14" JSONStream "1.3.5" async "3.2.6" clipanion "4.0.0-rc.4" @@ -17241,8 +17248,8 @@ verdaccio@6.1.2: mkdirp "1.0.4" pkginfo "0.4.1" semver "7.6.3" - verdaccio-audit "13.0.0-next-8.15" - verdaccio-htpasswd "13.0.0-next-8.15" + verdaccio-audit "13.0.0-next-8.14" + verdaccio-htpasswd "13.0.0-next-8.14" verror@1.10.0: version "1.10.0" @@ -17746,7 +17753,7 @@ yargs@^15.0.2: y18n "^4.0.0" yargs-parser "^18.1.2" -yargs@^17.3.1, yargs@^17.6.2: +yargs@^17.3.1, yargs@^17.6.2, yargs@^17.7.2: version "17.7.2" resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== From c8eb338712ec235f02ebbe4e75db65a9bae6816b Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 23 May 2025 12:37:39 -0700 Subject: [PATCH 65/75] removing test output file. This will be generated by tests only now. 
--- .../src/__tests__/test-files/analysis.json | 1005 ----------------- 1 file changed, 1005 deletions(-) delete mode 100644 packages/token-analyzer/src/__tests__/test-files/analysis.json diff --git a/packages/token-analyzer/src/__tests__/test-files/analysis.json b/packages/token-analyzer/src/__tests__/test-files/analysis.json deleted file mode 100644 index c7b9595eb..000000000 --- a/packages/token-analyzer/src/__tests__/test-files/analysis.json +++ /dev/null @@ -1,1005 +0,0 @@ -{ - "useButtonStyles.styles.ts": { - "styles": { - "useRootBaseClassName": { - "resetStyles": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorNeutralBackground1"], - "path": ["backgroundColor"] - }, - { - "property": "color", - "token": ["semanticTokens.cornerFlyoutRest"], - "path": ["color"] - }, - { - "property": "border", - "token": ["tokens.strokeWidthThin"], - "path": ["border"] - }, - { - "property": "border", - "token": ["tokens.colorNeutralStroke1"], - "path": ["border"] - }, - { - "property": "fontFamily", - "token": ["textStyleAiHeaderFontfamily"], - "path": ["fontFamily"] - }, - { - "property": "padding", - "token": ["tokens.spacingHorizontalM"], - "path": ["padding"] - }, - { - "property": "borderRadius", - "token": ["tokens.borderRadiusMedium"], - "path": ["borderRadius"] - }, - { - "property": "fontSize", - "token": ["tokens.fontSizeBase300"], - "path": ["fontSize"] - }, - { - "property": "fontWeight", - "token": ["tokens.fontWeightSemibold"], - "path": ["fontWeight"] - }, - { - "property": "lineHeight", - "token": ["tokens.lineHeightBase300"], - "path": ["lineHeight"] - }, - { - "property": "transitionDuration", - "token": ["tokens.durationFaster"], - "path": ["transitionDuration"] - }, - { - "property": "transitionTimingFunction", - "token": ["tokens.curveEasyEase"], - "path": ["transitionTimingFunction"] - } - ], - "nested": { - "':hover'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["cornerCtrlLgHoverRaw"], - "path": ["':hover'", "backgroundColor"] - }, - { - "property": "borderColor", - "token": ["ctrlLinkForegroundBrandHover"], - "path": ["':hover'", "borderColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForeground1Hover"], - "path": ["':hover'", "color"] - } - ] - }, - "':hover:active'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorNeutralBackground1Pressed"], - "path": ["':hover:active'", "backgroundColor"] - }, - { - "property": "borderColor", - "token": ["tokens.colorNeutralStroke1Pressed"], - "path": ["':hover:active'", "borderColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForeground1Pressed"], - "path": ["':hover:active'", "color"] - } - ] - }, - ":focus": { - "tokens": [ - { - "property": "borderColor", - "token": ["tokens.colorStrokeFocus2"], - "path": [":focus", "borderColor"] - }, - { - "property": "borderRadius", - "token": ["tokens.borderRadiusMedium"], - "path": [":focus", "borderRadius"] - }, - { - "property": "outline", - "token": ["tokens.strokeWidthThick"], - "path": [":focus", "outline"] - }, - { - "property": "outline", - "token": ["tokens.colorTransparentStroke"], - "path": [":focus", "outline"] - }, - { - "property": "boxShadow", - "token": ["tokens.strokeWidthThin"], - "path": [":focus", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.colorStrokeFocus2"], - "path": [":focus", "boxShadow"] - } - ] - }, - "'@supports (-moz-appearance:button)'": { - "tokens": [], - "nested": { - ":focus": { - "tokens": [ - { - "property": 
"boxShadow", - "token": ["tokens.strokeWidthThin"], - "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.colorStrokeFocus2"], - "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] - } - ] - } - } - } - }, - "isResetStyles": true, - "assignedVariables": ["rootBaseClassName"] - } - }, - "useIconBaseClassName": { - "resetStyles": { - "tokens": [ - { - "property": "[iconSpacingVar]", - "token": ["tokens.spacingHorizontalSNudge"], - "path": ["[iconSpacingVar]"] - } - ], - "nested": {}, - "isResetStyles": true, - "assignedVariables": ["iconBaseClassName"] - } - }, - "useRootStyles": { - "outline": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["backgroundColor"] - } - ], - "nested": { - "':hover'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackgroundHover"], - "path": ["':hover'", "backgroundColor"] - } - ] - }, - "':hover:active'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackgroundPressed"], - "path": ["':hover:active'", "backgroundColor"] - } - ] - } - }, - "assignedVariables": ["rootStyles"] - }, - "primary": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorBrandBackground"], - "path": ["backgroundColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForegroundOnBrand"], - "path": ["color"] - } - ], - "nested": { - "':hover'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorBrandBackgroundHover"], - "path": ["':hover'", "backgroundColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForegroundOnBrand"], - "path": ["':hover'", "color"] - } - ] - }, - "':hover:active'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorBrandBackgroundPressed"], - "path": ["':hover:active'", "backgroundColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForegroundOnBrand"], - "path": ["':hover:active'", "color"] - } - ] - } - }, - "assignedVariables": ["rootStyles"] - }, - "subtle": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorSubtleBackground"], - "path": ["backgroundColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForeground2"], - "path": ["color"] - } - ], - "nested": { - "':hover'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorSubtleBackgroundHover"], - "path": ["':hover'", "backgroundColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForeground2Hover"], - "path": ["':hover'", "color"] - } - ], - "nested": { - "[`& .${buttonClassNames.icon}`]": { - "tokens": [ - { - "property": "color", - "token": ["tokens.colorNeutralForeground2BrandHover"], - "path": ["':hover'", "[`& .${buttonClassNames.icon}`]", "color"] - } - ] - } - } - }, - "':hover:active'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorSubtleBackgroundPressed"], - "path": ["':hover:active'", "backgroundColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForeground2Pressed"], - "path": ["':hover:active'", "color"] - } - ], - "nested": { - "[`& .${buttonClassNames.icon}`]": { - "tokens": [ - { - "property": "color", - "token": ["tokens.colorNeutralForeground2BrandPressed"], - "path": ["':hover:active'", "[`& .${buttonClassNames.icon}`]", "color"] - } - ] - } - } - } - }, - "assignedVariables": 
["rootStyles"] - }, - "transparent": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["backgroundColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForeground2"], - "path": ["color"] - } - ], - "nested": { - "':hover'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackgroundHover"], - "path": ["':hover'", "backgroundColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForeground2BrandHover"], - "path": ["':hover'", "color"] - } - ] - }, - "':hover:active'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackgroundPressed"], - "path": ["':hover:active'", "backgroundColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForeground2BrandPressed"], - "path": ["':hover:active'", "color"] - } - ] - }, - "'@media (forced-colors: active)'": { - "tokens": [], - "nested": { - "':hover'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["'@media (forced-colors: active)'", "':hover'", "backgroundColor"] - } - ] - }, - "':hover:active'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["'@media (forced-colors: active)'", "':hover:active'", "backgroundColor"] - } - ] - } - } - } - }, - "assignedVariables": ["rootStyles"] - }, - "circular": { - "tokens": [ - { - "property": "borderRadius", - "token": ["tokens.borderRadiusCircular"], - "path": ["borderRadius"] - } - ], - "assignedVariables": ["rootStyles"] - }, - "square": { - "tokens": [ - { - "property": "borderRadius", - "token": ["tokens.borderRadiusNone"], - "path": ["borderRadius"] - } - ], - "assignedVariables": ["rootStyles"] - }, - "small": { - "tokens": [ - { - "property": "padding", - "token": ["tokens.spacingHorizontalS"], - "path": ["padding"] - }, - { - "property": "borderRadius", - "token": ["tokens.borderRadiusMedium"], - "path": ["borderRadius"] - }, - { - "property": "fontSize", - "token": ["tokens.fontSizeBase200"], - "path": ["fontSize"] - }, - { - "property": "fontWeight", - "token": ["tokens.fontWeightRegular"], - "path": ["fontWeight"] - }, - { - "property": "lineHeight", - "token": ["tokens.lineHeightBase200"], - "path": ["lineHeight"] - } - ], - "assignedVariables": ["rootStyles"] - }, - "large": { - "tokens": [ - { - "property": "padding", - "token": ["tokens.spacingHorizontalL"], - "path": ["padding"] - }, - { - "property": "borderRadius", - "token": ["tokens.borderRadiusMedium"], - "path": ["borderRadius"] - }, - { - "property": "fontSize", - "token": ["tokens.fontSizeBase400"], - "path": ["fontSize"] - }, - { - "property": "fontWeight", - "token": ["tokens.fontWeightSemibold"], - "path": ["fontWeight"] - }, - { - "property": "lineHeight", - "token": ["tokens.lineHeightBase400"], - "path": ["lineHeight"] - } - ], - "assignedVariables": ["rootStyles"] - } - }, - "useRootDisabledStyles": { - "base": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorNeutralBackgroundDisabled"], - "path": ["backgroundColor"] - }, - { - "property": "borderTopColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["borderTopColor"] - }, - { - "property": "borderRightColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["borderRightColor"] - }, - { - "property": "borderBottomColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["borderBottomColor"] - 
}, - { - "property": "borderLeftColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["borderLeftColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForegroundDisabled"], - "path": ["color"] - } - ], - "nested": { - "[`& .${buttonClassNames.icon}`]": { - "tokens": [ - { - "property": "color", - "token": ["tokens.colorNeutralForegroundDisabled"], - "path": ["[`& .${buttonClassNames.icon}`]", "color"] - } - ] - }, - "':hover'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorNeutralBackgroundDisabled"], - "path": ["':hover'", "backgroundColor"] - }, - { - "property": "borderTopColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["':hover'", "borderTopColor"] - }, - { - "property": "borderRightColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["':hover'", "borderRightColor"] - }, - { - "property": "borderBottomColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["':hover'", "borderBottomColor"] - }, - { - "property": "borderLeftColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["':hover'", "borderLeftColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForegroundDisabled"], - "path": ["':hover'", "color"] - } - ], - "nested": { - "[`& .${buttonClassNames.icon}`]": { - "tokens": [ - { - "property": "color", - "token": ["tokens.colorNeutralForegroundDisabled"], - "path": ["':hover'", "[`& .${buttonClassNames.icon}`]", "color"] - } - ] - } - } - }, - "':hover:active'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorNeutralBackgroundDisabled"], - "path": ["':hover:active'", "backgroundColor"] - }, - { - "property": "borderTopColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["':hover:active'", "borderTopColor"] - }, - { - "property": "borderRightColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["':hover:active'", "borderRightColor"] - }, - { - "property": "borderBottomColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["':hover:active'", "borderBottomColor"] - }, - { - "property": "borderLeftColor", - "token": ["tokens.colorNeutralStrokeDisabled"], - "path": ["':hover:active'", "borderLeftColor"] - }, - { - "property": "color", - "token": ["tokens.colorNeutralForegroundDisabled"], - "path": ["':hover:active'", "color"] - } - ], - "nested": { - "[`& .${buttonClassNames.icon}`]": { - "tokens": [ - { - "property": "color", - "token": ["tokens.colorNeutralForegroundDisabled"], - "path": ["':hover:active'", "[`& .${buttonClassNames.icon}`]", "color"] - } - ] - } - } - } - }, - "assignedVariables": ["rootDisabledStyles"] - }, - "outline": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["backgroundColor"] - } - ], - "nested": { - "':hover'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["':hover'", "backgroundColor"] - } - ] - }, - "':hover:active'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["':hover:active'", "backgroundColor"] - } - ] - } - }, - "assignedVariables": ["rootDisabledStyles"] - }, - "subtle": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["backgroundColor"] - } - ], - "nested": { - "':hover'": { - "tokens": [ - { - "property": "backgroundColor", - "token": 
["tokens.colorTransparentBackground"], - "path": ["':hover'", "backgroundColor"] - } - ] - }, - "':hover:active'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["':hover:active'", "backgroundColor"] - } - ] - } - }, - "assignedVariables": ["rootDisabledStyles"] - }, - "transparent": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["backgroundColor"] - } - ], - "nested": { - "':hover'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["':hover'", "backgroundColor"] - } - ] - }, - "':hover:active'": { - "tokens": [ - { - "property": "backgroundColor", - "token": ["tokens.colorTransparentBackground"], - "path": ["':hover:active'", "backgroundColor"] - } - ] - } - }, - "assignedVariables": ["rootDisabledStyles"] - } - }, - "useRootFocusStyles": { - "circular": { - "tokens": [], - "nested": { - ":focus": { - "tokens": [ - { - "property": "borderRadius", - "token": ["tokens.borderRadiusCircular"], - "path": [":focus", "borderRadius"] - } - ] - } - }, - "assignedVariables": ["rootFocusStyles"] - }, - "square": { - "tokens": [], - "nested": { - ":focus": { - "tokens": [ - { - "property": "borderRadius", - "token": ["tokens.borderRadiusNone"], - "path": [":focus", "borderRadius"] - } - ] - } - }, - "assignedVariables": ["rootFocusStyles"] - }, - "primary": { - "tokens": [], - "nested": { - ":focus": { - "tokens": [ - { - "property": "borderTopColor", - "token": ["tokens.colorStrokeFocus2"], - "path": [":focus", "borderTopColor"] - }, - { - "property": "borderRightColor", - "token": ["tokens.colorStrokeFocus1"], - "path": [":focus", "borderRightColor"] - }, - { - "property": "borderBottomColor", - "token": ["tokens.borderRadiusCircular"], - "path": [":focus", "borderBottomColor"] - }, - { - "property": "borderLeftColor", - "token": ["tokens.colorStrokeFocus1"], - "path": [":focus", "borderLeftColor"] - }, - { - "property": "boxShadow", - "token": ["tokens.shadow2"], - "path": [":focus", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.strokeWidthThin"], - "path": [":focus", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.colorStrokeFocus2"], - "path": [":focus", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.strokeWidthThick"], - "path": [":focus", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.colorNeutralForegroundOnBrand"], - "path": [":focus", "boxShadow"] - } - ], - "nested": { - "':hover'": { - "tokens": [ - { - "property": "boxShadow", - "token": ["tokens.shadow2"], - "path": [":focus", "':hover'", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.strokeWidthThin"], - "path": [":focus", "':hover'", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.colorStrokeFocus2"], - "path": [":focus", "':hover'", "boxShadow"] - }, - { - "property": "borderTopColor", - "token": ["tokens.colorStrokeFocus2"], - "path": [":focus", "':hover'", "borderTopColor"] - }, - { - "property": "borderRightColor", - "token": ["tokens.colorStrokeFocus2"], - "path": [":focus", "':hover'", "borderRightColor"] - }, - { - "property": "borderBottomColor", - "token": ["tokens.colorStrokeFocus2"], - "path": [":focus", "':hover'", "borderBottomColor"] - }, - { - "property": "borderLeftColor", - "token": ["tokens.colorStrokeFocus2"], - "path": [":focus", "':hover'", "borderLeftColor"] - } - ] - } - } - }, - 
"'@supports (-moz-appearance:button)'": { - "tokens": [], - "nested": { - ":focus": { - "tokens": [ - { - "property": "boxShadow", - "token": ["tokens.shadow2"], - "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.strokeWidthThin"], - "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.colorStrokeFocus2"], - "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.strokeWidthThick"], - "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.colorNeutralForegroundOnBrand"], - "path": ["'@supports (-moz-appearance:button)'", ":focus", "boxShadow"] - } - ], - "nested": { - "':hover'": { - "tokens": [ - { - "property": "boxShadow", - "token": ["tokens.shadow2"], - "path": ["'@supports (-moz-appearance:button)'", ":focus", "':hover'", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.strokeWidthThin"], - "path": ["'@supports (-moz-appearance:button)'", ":focus", "':hover'", "boxShadow"] - }, - { - "property": "boxShadow", - "token": ["tokens.colorStrokeFocus2"], - "path": ["'@supports (-moz-appearance:button)'", ":focus", "':hover'", "boxShadow"] - } - ] - } - } - } - } - } - }, - "assignedVariables": ["rootFocusStyles"] - }, - "small": { - "tokens": [], - "nested": { - ":focus": { - "tokens": [ - { - "property": "borderRadius", - "token": ["tokens.borderRadiusSmall"], - "path": [":focus", "borderRadius"] - } - ] - } - }, - "assignedVariables": ["rootFocusStyles"] - }, - "large": { - "tokens": [], - "nested": { - ":focus": { - "tokens": [ - { - "property": "borderRadius", - "token": ["tokens.borderRadiusLarge"], - "path": [":focus", "borderRadius"] - } - ] - } - }, - "assignedVariables": ["rootFocusStyles"] - } - }, - "useRootIconOnlyStyles": {}, - "useIconStyles": { - "small": { - "tokens": [ - { - "property": "[iconSpacingVar]", - "token": ["tokens.spacingHorizontalXS"], - "path": ["[iconSpacingVar]"] - } - ], - "assignedVariables": ["iconStyles"] - }, - "large": { - "tokens": [ - { - "property": "[iconSpacingVar]", - "token": ["tokens.spacingHorizontalSNudge"], - "path": ["[iconSpacingVar]"] - } - ], - "assignedVariables": ["iconStyles"] - } - } - }, - "metadata": { - "styleConditions": { - "buttonClassNames.root": { - "isBase": true, - "slotName": "root" - }, - "rootBaseClassName": { - "isBase": true, - "slotName": "root" - }, - "rootStyles[size]": { - "isBase": true, - "slotName": "root" - }, - "rootStyles[shape]": { - "isBase": true, - "slotName": "root" - }, - "rootFocusStyles[size]": { - "isBase": true, - "slotName": "root" - }, - "rootFocusStyles[shape]": { - "isBase": true, - "slotName": "root" - }, - "state.root.className": { - "isBase": true, - "slotName": "root" - }, - "rootStyles.smallWithIcon": { - "conditions": ["icon && size === 'small'"], - "slotName": "root" - }, - "rootStyles.largeWithIcon": { - "conditions": ["icon && size === 'large'"], - "slotName": "root" - }, - "rootDisabledStyles.base": { - "conditions": ["(disabled || disabledFocusable)"], - "slotName": "root" - }, - "rootDisabledStyles.highContrast": { - "conditions": ["(disabled || disabledFocusable)"], - "slotName": "root" - }, - "rootFocusStyles.primary": { - "conditions": ["appearance === 'primary'"], - "slotName": "root" - }, - "buttonClassNames.icon": { - "isBase": true, - "slotName": "icon" - }, - 
"iconBaseClassName": { - "isBase": true, - "slotName": "icon" - }, - "iconStyles[size]": { - "isBase": true, - "slotName": "icon" - }, - "state.icon.className": { - "isBase": true, - "slotName": "icon" - } - } - } - } -} From aeede416974616bd60403bcf606d4004cfe0b76d Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 23 May 2025 12:43:57 -0700 Subject: [PATCH 66/75] deduping yargs types --- yarn.lock | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/yarn.lock b/yarn.lock index 419491414..26a6bf0ac 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6290,20 +6290,13 @@ dependencies: "@types/yargs-parser" "*" -"@types/yargs@^17.0.33": +"@types/yargs@^17.0.33", "@types/yargs@^17.0.8": version "17.0.33" resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.33.tgz#8c32303da83eec050a84b3c7ae7b9f922d13e32d" integrity sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA== dependencies: "@types/yargs-parser" "*" -"@types/yargs@^17.0.8": - version "17.0.24" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.24.tgz#b3ef8d50ad4aa6aecf6ddc97c580a00f5aa11902" - integrity sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw== - dependencies: - "@types/yargs-parser" "*" - "@typescript-eslint/eslint-plugin@8.32.1": version "8.32.1" resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.32.1.tgz#9185b3eaa3b083d8318910e12d56c68b3c4f45b4" @@ -15213,12 +15206,12 @@ safe-array-concat@^1.1.3: has-symbols "^1.1.0" isarray "^2.0.5" -safe-buffer@5.1.2, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@5.2.1, safe-buffer@~5.2.0: +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.2, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== From bfa9c54e62bd39e6edd007033816ab13dbe852ea Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 23 May 2025 12:49:33 -0700 Subject: [PATCH 67/75] fixing bad merge. --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index afecc4a59..955b18e5d 100644 --- a/package.json +++ b/package.json @@ -106,7 +106,7 @@ "tslib": "^2.3.0", "typescript": "5.7.3", "typescript-eslint": "8.32.1", - "verdaccio": "6.1.1", + "verdaccio": "6.1.2", "yargs": "^17.7.2" }, "dependencies": {}, From a1184a24003eeedb883c35190cf79672f2ce80c2 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 23 May 2025 14:31:56 -0700 Subject: [PATCH 68/75] fixing dedupe issues. 
--- yarn.lock | 184 +++++++++++++++++++++++++++--------------------------- 1 file changed, 92 insertions(+), 92 deletions(-) diff --git a/yarn.lock b/yarn.lock index 26a6bf0ac..2ecb2fc81 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6378,19 +6378,19 @@ "@typescript-eslint/types" "8.32.1" eslint-visitor-keys "^4.2.0" -"@verdaccio/auth@8.0.0-next-8.14": - version "8.0.0-next-8.14" - resolved "https://registry.yarnpkg.com/@verdaccio/auth/-/auth-8.0.0-next-8.14.tgz#79caa7ffdf4119a0dcf7708cdb4514ed050bc694" - integrity sha512-OOuPVsfo1Y+/84pxS1KMSTtf5sc67PfNByzhBwC8cX4E1WiArHuP+rKfEqqCLu4NaN1BVNW4HiqCbZ1GzDyWtQ== +"@verdaccio/auth@8.0.0-next-8.15": + version "8.0.0-next-8.15" + resolved "https://registry.yarnpkg.com/@verdaccio/auth/-/auth-8.0.0-next-8.15.tgz#9516255e9c1a096c018e75137af683f8351921f9" + integrity sha512-vAfzGOHbPcPXMCI90jqm/qSZ1OUBnOGzudZA3+YtherncdwADekvXbdJlZVclcfmZ0sRbfVG5Xpf88aETiwfcw== dependencies: - "@verdaccio/config" "8.0.0-next-8.14" - "@verdaccio/core" "8.0.0-next-8.14" + "@verdaccio/config" "8.0.0-next-8.15" + "@verdaccio/core" "8.0.0-next-8.15" "@verdaccio/loaders" "8.0.0-next-8.6" - "@verdaccio/signature" "8.0.0-next-8.6" - "@verdaccio/utils" "8.1.0-next-8.14" + "@verdaccio/signature" "8.0.0-next-8.7" + "@verdaccio/utils" "8.1.0-next-8.15" debug "4.4.0" lodash "4.17.21" - verdaccio-htpasswd "13.0.0-next-8.14" + verdaccio-htpasswd "13.0.0-next-8.15" "@verdaccio/commons-api@10.2.0": version "10.2.0" @@ -6400,22 +6400,22 @@ http-errors "2.0.0" http-status-codes "2.2.0" -"@verdaccio/config@8.0.0-next-8.14": - version "8.0.0-next-8.14" - resolved "https://registry.yarnpkg.com/@verdaccio/config/-/config-8.0.0-next-8.14.tgz#de9d2b45e56103bd2e4e7886952d781085a82f62" - integrity sha512-5YOiYfh9a0q+9ARU4NYnfl1YJunNKfyz1x2ONEhvmHkP11E782ep7ZtrJhgg/ZJTtOmjbCspgw6FN3WBe3WlLQ== +"@verdaccio/config@8.0.0-next-8.15": + version "8.0.0-next-8.15" + resolved "https://registry.yarnpkg.com/@verdaccio/config/-/config-8.0.0-next-8.15.tgz#de37c2e3c50bad236173d4c0adfbebc52f863fae" + integrity sha512-oEzQB+xeqaFAy54veMshqpt1hlZCYNkqoKuwkt7O8J43Fo/beiLluKUVneXckzi+pg1yvvGT7lNCbvuUQrxxQg== dependencies: - "@verdaccio/core" "8.0.0-next-8.14" - "@verdaccio/utils" "8.1.0-next-8.14" + "@verdaccio/core" "8.0.0-next-8.15" + "@verdaccio/utils" "8.1.0-next-8.15" debug "4.4.0" js-yaml "4.1.0" lodash "4.17.21" minimatch "7.4.6" -"@verdaccio/core@8.0.0-next-8.14": - version "8.0.0-next-8.14" - resolved "https://registry.yarnpkg.com/@verdaccio/core/-/core-8.0.0-next-8.14.tgz#2f0e190e471065cd48ed1bdd0a6ec0bb58254986" - integrity sha512-qj4KLkaVfMzqhDFn9xqpRvJQglalmgSJig0h8BzgI/fXQWlhmC0JI2TP0cJRYEhALN3UqjDAQQHKtqa3FqlfSw== +"@verdaccio/core@8.0.0-next-8.15": + version "8.0.0-next-8.15" + resolved "https://registry.yarnpkg.com/@verdaccio/core/-/core-8.0.0-next-8.15.tgz#8041767fb7ecb844476f9c6f253b9536b1aa69af" + integrity sha512-d5r/ZSkCri7s1hvV35enptquV5LJ81NqMYJnsjuryIUnvwn1yaqLlcdd6zIL08unzCSr7qDdUAdwGRRm6PKzng== dependencies: ajv "8.17.1" core-js "3.40.0" @@ -6460,12 +6460,12 @@ lowdb "1.0.0" mkdirp "1.0.4" -"@verdaccio/logger-commons@8.0.0-next-8.14": - version "8.0.0-next-8.14" - resolved "https://registry.yarnpkg.com/@verdaccio/logger-commons/-/logger-commons-8.0.0-next-8.14.tgz#92c15a1b0f62fd74b16c541a29d57281e0e0f520" - integrity sha512-NMRnYg2tWOOvyds5JwjWCKuGWVC5wvK/PIp0pCYQKsB8SsTnBAy1/9XL2gGBSKRkYju7CFO50Q8lRwQMn6k48A== +"@verdaccio/logger-commons@8.0.0-next-8.15": + version "8.0.0-next-8.15" + resolved 
"https://registry.yarnpkg.com/@verdaccio/logger-commons/-/logger-commons-8.0.0-next-8.15.tgz#d15b66c2be2d287731fa88cbe3a87b035f35eae1" + integrity sha512-nF7VgBC2cl5ufv+mZEwBHHyZFb1F0+kVkuRMf3Tyk+Qp4lXilC9MRZ0oc+RnzsDbNmJ6IZHgHNbs6aJrNfaRGg== dependencies: - "@verdaccio/core" "8.0.0-next-8.14" + "@verdaccio/core" "8.0.0-next-8.15" "@verdaccio/logger-prettify" "8.0.0-next-8.2" colorette "2.0.20" debug "4.4.0" @@ -6481,23 +6481,23 @@ pino-abstract-transport "1.2.0" sonic-boom "3.8.1" -"@verdaccio/logger@8.0.0-next-8.14": - version "8.0.0-next-8.14" - resolved "https://registry.yarnpkg.com/@verdaccio/logger/-/logger-8.0.0-next-8.14.tgz#ba2dd88207b17e20542710cd42ea0ae0c60f7eab" - integrity sha512-gddIKMRKDdtbnoCr9R9Yg38gD/lsydZCaehdaTVuolVU9y8iJLZtfjwjU0i2Jko8Qloc8p0CRifSnZhT4fiOug== +"@verdaccio/logger@8.0.0-next-8.15": + version "8.0.0-next-8.15" + resolved "https://registry.yarnpkg.com/@verdaccio/logger/-/logger-8.0.0-next-8.15.tgz#da0b3a95684bc20a37fd396f5a4692abb79d58bc" + integrity sha512-3gjhqvB87JUNDHFMN3YG4IweS9EgbCpAWZatNYzcoIWOoGiEaFQQBSM592CaFiI0yf8acyqWkNa1V95L1NMbRg== dependencies: - "@verdaccio/logger-commons" "8.0.0-next-8.14" + "@verdaccio/logger-commons" "8.0.0-next-8.15" pino "9.6.0" -"@verdaccio/middleware@8.0.0-next-8.14": - version "8.0.0-next-8.14" - resolved "https://registry.yarnpkg.com/@verdaccio/middleware/-/middleware-8.0.0-next-8.14.tgz#2494707425cb0619cfa6251a05dc10d2b64beff9" - integrity sha512-ieYP4TJ2jaBR23NUDwYlVJYh9rLx3Zi3NdKIc5snbNY1IVMXd4O9j+Wl3FBwibZIvHbuXRRVFk3c32irPPu0KA== +"@verdaccio/middleware@8.0.0-next-8.15": + version "8.0.0-next-8.15" + resolved "https://registry.yarnpkg.com/@verdaccio/middleware/-/middleware-8.0.0-next-8.15.tgz#48bf1cbf75973d775fc105d080d0e1b374ac03e6" + integrity sha512-xsCLGbnhqcYwE8g/u9wxNLfDcESpr9ptEZ8Ce7frVTphU7kYIL48QCDPMzug7U+AguNtCq4v4zcoY1PaOQ8mgw== dependencies: - "@verdaccio/config" "8.0.0-next-8.14" - "@verdaccio/core" "8.0.0-next-8.14" - "@verdaccio/url" "13.0.0-next-8.14" - "@verdaccio/utils" "8.1.0-next-8.14" + "@verdaccio/config" "8.0.0-next-8.15" + "@verdaccio/core" "8.0.0-next-8.15" + "@verdaccio/url" "13.0.0-next-8.15" + "@verdaccio/utils" "8.1.0-next-8.15" debug "4.4.0" express "4.21.2" express-rate-limit "5.5.1" @@ -6510,12 +6510,12 @@ resolved "https://registry.yarnpkg.com/@verdaccio/search-indexer/-/search-indexer-8.0.0-next-8.4.tgz#2ee936cc9af406479bb1c58dae2a72c773861a2c" integrity sha512-Oea9m9VDqdlDPyQ9+fpcxZk0sIYH2twVK+YbykHpSYpjZRzz9hJfIr/uUwAgpWq83zAl2YDbz4zR3TjzjrWQig== -"@verdaccio/signature@8.0.0-next-8.6": - version "8.0.0-next-8.6" - resolved "https://registry.yarnpkg.com/@verdaccio/signature/-/signature-8.0.0-next-8.6.tgz#f840e034f143a9fde311b920368fd9b42e05198e" - integrity sha512-fIYC0mluIUmQBN47SYrZ1+I0UquSbAiGV5+n3ime4gyGJjDoDE28yB7BkqD869FOypFTjzzCMsxN0fuRU94UYg== +"@verdaccio/signature@8.0.0-next-8.7": + version "8.0.0-next-8.7" + resolved "https://registry.yarnpkg.com/@verdaccio/signature/-/signature-8.0.0-next-8.7.tgz#faebf6268831f6eaac51d7111eb164e0317c9f77" + integrity sha512-sqP+tNzUtVIwUtt1ZHwYoxsO3roDLK7GW8c8Hj0SNaON+9ele9z4NBhaor+g95zRuLy6xtw/RgOvpyLon/vPrA== dependencies: - "@verdaccio/config" "8.0.0-next-8.14" + "@verdaccio/config" "8.0.0-next-8.15" debug "4.4.0" jsonwebtoken "9.0.2" @@ -6524,40 +6524,40 @@ resolved "https://registry.yarnpkg.com/@verdaccio/streams/-/streams-10.2.1.tgz#9443d24d4f17672b8f8c8e147690557918ed2bcb" integrity sha512-OojIG/f7UYKxC4dYX8x5ax8QhRx1b8OYUAMz82rUottCuzrssX/4nn5QE7Ank0DUSX3C9l/HPthc4d9uKRJqJQ== -"@verdaccio/tarball@13.0.0-next-8.14": - version 
"13.0.0-next-8.14" - resolved "https://registry.yarnpkg.com/@verdaccio/tarball/-/tarball-13.0.0-next-8.14.tgz#47cce274177b1bb7034c5cfd52bc221ab5485e34" - integrity sha512-7da3X1L+AsswI0RPOor3sWoLU4zl611Qgnms3mOBF+Y7pNwHF1KsrkXBM5aV/sxr0+FYsrU2bDrxHKg5aCWi0w== +"@verdaccio/tarball@13.0.0-next-8.15": + version "13.0.0-next-8.15" + resolved "https://registry.yarnpkg.com/@verdaccio/tarball/-/tarball-13.0.0-next-8.15.tgz#9d043730dc36ecfd329501d22670480cdee76d8e" + integrity sha512-oSNmq7zD/iPIC5HpJbOJjW/lb0JV9k3jLwI6sG7kPgm+UIxVAOV4fKQOAD18HpHl/WjkF247NA6zGlAB94Habw== dependencies: - "@verdaccio/core" "8.0.0-next-8.14" - "@verdaccio/url" "13.0.0-next-8.14" - "@verdaccio/utils" "8.1.0-next-8.14" + "@verdaccio/core" "8.0.0-next-8.15" + "@verdaccio/url" "13.0.0-next-8.15" + "@verdaccio/utils" "8.1.0-next-8.15" debug "4.4.0" gunzip-maybe "^1.4.2" lodash "4.17.21" tar-stream "^3.1.7" -"@verdaccio/ui-theme@8.0.0-next-8.14": - version "8.0.0-next-8.14" - resolved "https://registry.yarnpkg.com/@verdaccio/ui-theme/-/ui-theme-8.0.0-next-8.14.tgz#2de0cf58879b085ceeb9213830707afa8a9a799f" - integrity sha512-BW61qb3EGIoZrzb2AhrAqnpuim5Me7xFCR8VQPgJIya7z/zVhd+b3t4gnaOCowPnXgGrAvIyWuhVg92hnDh9pQ== +"@verdaccio/ui-theme@8.0.0-next-8.15": + version "8.0.0-next-8.15" + resolved "https://registry.yarnpkg.com/@verdaccio/ui-theme/-/ui-theme-8.0.0-next-8.15.tgz#4eac7390b94fd4b8578f7165062f559c083391fc" + integrity sha512-k9BAM7rvbUqB2JPReNgXKUVTzBkdmIrNw0f6/7uyO+9cp7eVuarrPBnVF0oMc7jzVNBZRCpUksrhMZ0KwDZTpw== -"@verdaccio/url@13.0.0-next-8.14": - version "13.0.0-next-8.14" - resolved "https://registry.yarnpkg.com/@verdaccio/url/-/url-13.0.0-next-8.14.tgz#675d69a13e908cfe00c579c4e77e1479d3e18733" - integrity sha512-3DOPuaZAPSXh2tyYr0L6CSI9Gbd8qmWA5d4WtxH7yxVbRIEloiFOINBy4NaBZVtwfACANG22OtEd1mdvGjHcvA== +"@verdaccio/url@13.0.0-next-8.15": + version "13.0.0-next-8.15" + resolved "https://registry.yarnpkg.com/@verdaccio/url/-/url-13.0.0-next-8.15.tgz#29c67bd865b7e362bf63cde74d3753d26a3ef418" + integrity sha512-1N/dGhw7cZMhupf/Xlm73beiL3oCaAiyo9DTumjF3aTcJnipVcT1hoj6CSj9RIX54824rUK9WVmo83dk0KPnjw== dependencies: - "@verdaccio/core" "8.0.0-next-8.14" + "@verdaccio/core" "8.0.0-next-8.15" debug "4.4.0" lodash "4.17.21" validator "13.12.0" -"@verdaccio/utils@8.1.0-next-8.14": - version "8.1.0-next-8.14" - resolved "https://registry.yarnpkg.com/@verdaccio/utils/-/utils-8.1.0-next-8.14.tgz#904362ee83551efc7b2473ea32efef78e8ef5235" - integrity sha512-ylEpOi1JD4NJeLqSunABsnmqZOAXyS3H5s2ysHnA68TNvfV/CCZwhinRNyjxiedOzJeZhSowiWwFynBD/f6dew== +"@verdaccio/utils@8.1.0-next-8.15": + version "8.1.0-next-8.15" + resolved "https://registry.yarnpkg.com/@verdaccio/utils/-/utils-8.1.0-next-8.15.tgz#d37d5d43ad8f5fded8cc1df6411d5babba6f461d" + integrity sha512-efg/bunOUMVXV+MlljJCrpuT+OQRrQS4wJyGL92B3epUGlgZ8DXs+nxN5v59v1a6AocAdSKwHgZS0g9txmBhOg== dependencies: - "@verdaccio/core" "8.0.0-next-8.14" + "@verdaccio/core" "8.0.0-next-8.15" lodash "4.17.21" minimatch "7.4.6" semver "7.7.1" @@ -15206,12 +15206,12 @@ safe-array-concat@^1.1.3: has-symbols "^1.1.0" isarray "^2.0.5" -safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@5.1.2, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@5.2.1, 
safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.2, safe-buffer@~5.2.0: +safe-buffer@5.2.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== @@ -17181,23 +17181,23 @@ vary@^1, vary@^1.1.2, vary@~1.1.2: resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== -verdaccio-audit@13.0.0-next-8.14: - version "13.0.0-next-8.14" - resolved "https://registry.yarnpkg.com/verdaccio-audit/-/verdaccio-audit-13.0.0-next-8.14.tgz#2637f5fd71fa70a5ad6bee705ff633735f441c6b" - integrity sha512-btV62SbeBDtjdWsZN8RhzSE9ef89yYtdGWeRNSlYTwB/1qvQS//GhNWpUQvD83fPDUMrfo2o7tvmzIML6kOCNQ== +verdaccio-audit@13.0.0-next-8.15: + version "13.0.0-next-8.15" + resolved "https://registry.yarnpkg.com/verdaccio-audit/-/verdaccio-audit-13.0.0-next-8.15.tgz#1a759de5793169ca1c92a7d01a480d56c85513f5" + integrity sha512-Aeau0u0fi5l4PoSDyOV6glz2FDO9+ofvogJIELV4H6fhDXhgPc2MnoKuaUgOT//khESLle/a6YfcLY2/KNLs6g== dependencies: - "@verdaccio/config" "8.0.0-next-8.14" - "@verdaccio/core" "8.0.0-next-8.14" + "@verdaccio/config" "8.0.0-next-8.15" + "@verdaccio/core" "8.0.0-next-8.15" express "4.21.2" https-proxy-agent "5.0.1" node-fetch cjs -verdaccio-htpasswd@13.0.0-next-8.14: - version "13.0.0-next-8.14" - resolved "https://registry.yarnpkg.com/verdaccio-htpasswd/-/verdaccio-htpasswd-13.0.0-next-8.14.tgz#02cc8294cf285db2ef63634ca18f156cc9f4fa82" - integrity sha512-JOXlm1g6MrZgOhzvgQfTNBmmstMJVTSiXmXbWCxSRjTzRX/v1WIdhwIWEzhn+lJUEWKv+1Z17dSSk6NiVZW/8Q== +verdaccio-htpasswd@13.0.0-next-8.15: + version "13.0.0-next-8.15" + resolved "https://registry.yarnpkg.com/verdaccio-htpasswd/-/verdaccio-htpasswd-13.0.0-next-8.15.tgz#59cd659a45cba3a8f592943b32be8b9a7d4ddc11" + integrity sha512-rQg5oZ/rReDAM4g4W68hvtzReTbM6vduvVtobHsQxhbtbotEuUjP6O8uaROYtgZ60giGva5Tub2SOm2T9Ln9Dw== dependencies: - "@verdaccio/core" "8.0.0-next-8.14" + "@verdaccio/core" "8.0.0-next-8.15" "@verdaccio/file-locking" "13.0.0-next-8.3" apache-md5 "1.1.8" bcryptjs "2.4.3" @@ -17206,26 +17206,26 @@ verdaccio-htpasswd@13.0.0-next-8.14: http-errors "2.0.0" unix-crypt-td-js "1.1.4" -verdaccio@6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/verdaccio/-/verdaccio-6.1.1.tgz#44de63550ff6aa6b81e287a312a974b01aa0be1b" - integrity sha512-kemqW6Y/VA2Z7I8Dn3seyMzZNlnIQaP8l9mtHKriW26fxlAD2K+9esSTn8gsZej038Z7sbqpy1RFJNOZ5BPr0w== +verdaccio@6.1.2: + version "6.1.2" + resolved "https://registry.yarnpkg.com/verdaccio/-/verdaccio-6.1.2.tgz#3862491cddc6bb127a2458e49da4412af5385407" + integrity sha512-HQCquycSQkA+tKRVqMjIVRzmhzTciLfScvKIhhiwZZ9Qd13e2KJQTOdB7QrSacfJuPpl94TA5EZ7XmVRQKk3ag== dependencies: "@cypress/request" "3.0.8" - "@verdaccio/auth" "8.0.0-next-8.14" - "@verdaccio/config" "8.0.0-next-8.14" - "@verdaccio/core" "8.0.0-next-8.14" + "@verdaccio/auth" "8.0.0-next-8.15" + "@verdaccio/config" "8.0.0-next-8.15" + "@verdaccio/core" "8.0.0-next-8.15" "@verdaccio/loaders" "8.0.0-next-8.6" "@verdaccio/local-storage-legacy" "11.0.2" - "@verdaccio/logger" "8.0.0-next-8.14" - "@verdaccio/middleware" "8.0.0-next-8.14" + "@verdaccio/logger" "8.0.0-next-8.15" + "@verdaccio/middleware" "8.0.0-next-8.15" "@verdaccio/search-indexer" "8.0.0-next-8.4" - "@verdaccio/signature" "8.0.0-next-8.6" + 
"@verdaccio/signature" "8.0.0-next-8.7" "@verdaccio/streams" "10.2.1" - "@verdaccio/tarball" "13.0.0-next-8.14" - "@verdaccio/ui-theme" "8.0.0-next-8.14" - "@verdaccio/url" "13.0.0-next-8.14" - "@verdaccio/utils" "8.1.0-next-8.14" + "@verdaccio/tarball" "13.0.0-next-8.15" + "@verdaccio/ui-theme" "8.0.0-next-8.15" + "@verdaccio/url" "13.0.0-next-8.15" + "@verdaccio/utils" "8.1.0-next-8.15" JSONStream "1.3.5" async "3.2.6" clipanion "4.0.0-rc.4" @@ -17241,8 +17241,8 @@ verdaccio@6.1.1: mkdirp "1.0.4" pkginfo "0.4.1" semver "7.6.3" - verdaccio-audit "13.0.0-next-8.14" - verdaccio-htpasswd "13.0.0-next-8.14" + verdaccio-audit "13.0.0-next-8.15" + verdaccio-htpasswd "13.0.0-next-8.15" verror@1.10.0: version "1.10.0" From 9a4c583c8b992ae1e5580157758681c107e92952 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 23 May 2025 14:47:08 -0700 Subject: [PATCH 69/75] updates to packages to avoid syncpack issues --- package.json | 2 +- packages/token-analyzer/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 955b18e5d..a08366f88 100644 --- a/package.json +++ b/package.json @@ -35,7 +35,7 @@ "@fluentui/react-shared-contexts": "^9.7.2", "@fluentui/scheme-utilities": "^8.3.58", "@fluentui/semantic-tokens": "0.0.0-nightly-20250501-1704.1", - "@griffel/react": "^1.5.22", + "@griffel/react": "^1.5.14", "@griffel/shadow-dom": "~0.2.0", "@nx/devkit": "20.8.1", "@nx/eslint": "20.8.1", diff --git a/packages/token-analyzer/package.json b/packages/token-analyzer/package.json index a4283e2be..99d4a15ef 100644 --- a/packages/token-analyzer/package.json +++ b/packages/token-analyzer/package.json @@ -7,7 +7,7 @@ "ts-morph": "^24.0.0", "typescript": "5.7.3", "prettier": "^2.6.2", - "@griffel/react": "^1.5.22", + "@griffel/react": "^1.5.14", "yargs": "^17.7.2" }, "scripts": { From 0bd6e9cdc34235e79274c181430b7fe7d0ffca42 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 23 May 2025 16:10:20 -0700 Subject: [PATCH 70/75] formatting files --- .../src/__tests__/analyzer.test.ts | 4 +- .../src/__tests__/moduleResolver.test.ts | 36 +----- .../src/__tests__/packageImports.test.ts | 108 ++++++------------ .../src/__tests__/verifyFileExists.test.ts | 2 +- packages/token-analyzer/src/debugUtils.ts | 5 +- packages/token-analyzer/src/fileOperations.ts | 5 +- packages/token-analyzer/src/moduleResolver.ts | 21 +--- 7 files changed, 46 insertions(+), 135 deletions(-) diff --git a/packages/token-analyzer/src/__tests__/analyzer.test.ts b/packages/token-analyzer/src/__tests__/analyzer.test.ts index 69d1cb76a..72f806bf3 100644 --- a/packages/token-analyzer/src/__tests__/analyzer.test.ts +++ b/packages/token-analyzer/src/__tests__/analyzer.test.ts @@ -79,8 +79,6 @@ describe('Token Analyzer', () => { conditions: ['disabled'], slotName: 'root', }); - expect(metadata.styleConditions['styles.large'].conditions).toContain( - "size === 'large'" - ); + expect(metadata.styleConditions['styles.large'].conditions).toContain("size === 'large'"); }); }); diff --git a/packages/token-analyzer/src/__tests__/moduleResolver.test.ts b/packages/token-analyzer/src/__tests__/moduleResolver.test.ts index b928f07ba..b1b6d9063 100644 --- a/packages/token-analyzer/src/__tests__/moduleResolver.test.ts +++ b/packages/token-analyzer/src/__tests__/moduleResolver.test.ts @@ -96,11 +96,7 @@ describe('Module resolver functions', () => { test('resolves nested relative path correctly', () => { const sourceFilePath = path.join(TEST_DIR, 'source.ts'); - const result = resolveModulePath( - project, 
- './styles/theme', - sourceFilePath - ); + const result = resolveModulePath(project, './styles/theme', sourceFilePath); expect(result).not.toBeNull(); expect(result).toEqual(path.join(TEST_DIR, 'styles/theme.ts')); @@ -116,11 +112,7 @@ describe('Module resolver functions', () => { test('returns null for non-existent module', () => { const sourceFilePath = path.join(TEST_DIR, 'source.ts'); - const result = resolveModulePath( - project, - './non-existent', - sourceFilePath - ); + const result = resolveModulePath(project, './non-existent', sourceFilePath); expect(result).toBeNull(); }); @@ -139,11 +131,7 @@ describe('Module resolver functions', () => { }); // Second call should use cache - const secondResult = resolveModulePath( - project, - './utils', - sourceFilePath - ); + const secondResult = resolveModulePath(project, './utils', sourceFilePath); expect(secondResult).toEqual(firstResult); // Restore original function @@ -167,11 +155,7 @@ describe('Module resolver functions', () => { project.addSourceFileAtPath(sourceFilePath); // First call - const firstResult = getModuleSourceFile( - project, - './utils', - sourceFilePath - ); + const firstResult = getModuleSourceFile(project, './utils', sourceFilePath); expect(firstResult).not.toBeNull(); // Mock project.addSourceFileAtPath to verify cache is used @@ -181,11 +165,7 @@ describe('Module resolver functions', () => { }); // Second call should use cache - const secondResult = getModuleSourceFile( - project, - './utils', - sourceFilePath - ); + const secondResult = getModuleSourceFile(project, './utils', sourceFilePath); expect(secondResult).toBe(firstResult); // Same instance // Restore original function @@ -196,11 +176,7 @@ describe('Module resolver functions', () => { const sourceFilePath = path.join(TEST_DIR, 'source.ts'); project.addSourceFileAtPath(sourceFilePath); - const result = getModuleSourceFile( - project, - './non-existent', - sourceFilePath - ); + const result = getModuleSourceFile(project, './non-existent', sourceFilePath); expect(result).toBeNull(); }); }); diff --git a/packages/token-analyzer/src/__tests__/packageImports.test.ts b/packages/token-analyzer/src/__tests__/packageImports.test.ts index ca51a3bff..682054550 100644 --- a/packages/token-analyzer/src/__tests__/packageImports.test.ts +++ b/packages/token-analyzer/src/__tests__/packageImports.test.ts @@ -1,10 +1,6 @@ // packageImports.test.ts import { Project, ModuleResolutionKind, ScriptTarget } from 'ts-morph'; -import { - resolveModulePath, - clearModuleCache, - tsUtils, -} from '../moduleResolver'; +import { resolveModulePath, clearModuleCache, tsUtils } from '../moduleResolver'; import * as path from 'path'; import * as fs from 'fs'; import { findTsConfigPath } from '../findTsConfigPath'; @@ -119,31 +115,19 @@ describe('Package imports resolution', () => { // Mock the TypeScript resolution for scoped packages tsUtils.resolveModuleName = jest .fn() - .mockImplementation( - ( - moduleName: string, - containingFile: string, - compilerOptions: any, - host: any - ) => { - if (moduleName === '@scope/package') { - return { - resolvedModule: { - resolvedFileName: path.join(SCOPED_PACKAGE, 'index.js'), - extension: '.js', - isExternalLibraryImport: true, - }, - }; - } - // Call original for other cases - return originalResolve( - moduleName, - containingFile, - compilerOptions, - host - ); + .mockImplementation((moduleName: string, containingFile: string, compilerOptions: any, host: any) => { + if (moduleName === '@scope/package') { + return { + resolvedModule: { + 
resolvedFileName: path.join(SCOPED_PACKAGE, 'index.js'), + extension: '.js', + isExternalLibraryImport: true, + }, + }; } - ); + // Call original for other cases + return originalResolve(moduleName, containingFile, compilerOptions, host); + }); const result = resolveModulePath(project, '@scope/package', sourceFilePath); @@ -158,31 +142,19 @@ describe('Package imports resolution', () => { // Mock the TypeScript resolution for regular packages tsUtils.resolveModuleName = jest .fn() - .mockImplementation( - ( - moduleName: string, - containingFile: string, - compilerOptions: any, - host: any - ) => { - if (moduleName === 'some-package') { - return { - resolvedModule: { - resolvedFileName: path.join(REGULAR_PACKAGE, 'lib', 'index.js'), - extension: '.js', - isExternalLibraryImport: true, - }, - }; - } - // Call original for other cases - return originalResolve( - moduleName, - containingFile, - compilerOptions, - host - ); + .mockImplementation((moduleName: string, containingFile: string, compilerOptions: any, host: any) => { + if (moduleName === 'some-package') { + return { + resolvedModule: { + resolvedFileName: path.join(REGULAR_PACKAGE, 'lib', 'index.js'), + extension: '.js', + isExternalLibraryImport: true, + }, + }; } - ); + // Call original for other cases + return originalResolve(moduleName, containingFile, compilerOptions, host); + }); const result = resolveModulePath(project, 'some-package', sourceFilePath); @@ -197,31 +169,15 @@ describe('Package imports resolution', () => { // Mock the TypeScript resolution to return null for non-existent packages tsUtils.resolveModuleName = jest .fn() - .mockImplementation( - ( - moduleName: string, - containingFile: string, - compilerOptions: any, - host: any - ) => { - if (moduleName === 'non-existent-package') { - return { resolvedModule: undefined }; - } - // Call original for other cases - return originalResolve( - moduleName, - containingFile, - compilerOptions, - host - ); + .mockImplementation((moduleName: string, containingFile: string, compilerOptions: any, host: any) => { + if (moduleName === 'non-existent-package') { + return { resolvedModule: undefined }; } - ); + // Call original for other cases + return originalResolve(moduleName, containingFile, compilerOptions, host); + }); - const result = resolveModulePath( - project, - 'non-existent-package', - sourceFilePath - ); + const result = resolveModulePath(project, 'non-existent-package', sourceFilePath); expect(result).toBeNull(); expect(tsUtils.resolveModuleName).toHaveBeenCalled(); diff --git a/packages/token-analyzer/src/__tests__/verifyFileExists.test.ts b/packages/token-analyzer/src/__tests__/verifyFileExists.test.ts index a9f5937da..9e48d7b65 100644 --- a/packages/token-analyzer/src/__tests__/verifyFileExists.test.ts +++ b/packages/token-analyzer/src/__tests__/verifyFileExists.test.ts @@ -52,7 +52,7 @@ describe('verifyFileExists', () => { test('uses tsUtils.fileExists when available', () => { // Mock the tsUtils.fileExists function - tsUtils.fileExists = jest.fn().mockImplementation(filePath => { + tsUtils.fileExists = jest.fn().mockImplementation((filePath) => { return filePath === EXISTING_FILE; }); diff --git a/packages/token-analyzer/src/debugUtils.ts b/packages/token-analyzer/src/debugUtils.ts index d55192f5f..6f492308e 100644 --- a/packages/token-analyzer/src/debugUtils.ts +++ b/packages/token-analyzer/src/debugUtils.ts @@ -28,10 +28,7 @@ export const error = (message: string, errorArg: any): void => { console.error(`${prefix}${message}`, errorArg); }; -export const 
measureAsync = async ( - name: string, - fn: () => Promise -): Promise => { +export const measureAsync = async (name: string, fn: () => Promise): Promise => { if (!config.perf) { return fn(); } diff --git a/packages/token-analyzer/src/fileOperations.ts b/packages/token-analyzer/src/fileOperations.ts index 97a70a528..419d871b6 100644 --- a/packages/token-analyzer/src/fileOperations.ts +++ b/packages/token-analyzer/src/fileOperations.ts @@ -39,10 +39,7 @@ export async function findStyleFiles(dir: string): Promise { * @param currentFilePath The path of the file containing the import * @returns Resolved absolute path or null if not found */ -export async function resolveImportPath( - importPath: string, - currentFilePath: string -): Promise { +export async function resolveImportPath(importPath: string, currentFilePath: string): Promise { if (!importPath.startsWith('.')) { return null; } diff --git a/packages/token-analyzer/src/moduleResolver.ts b/packages/token-analyzer/src/moduleResolver.ts index 7d3b0f3c3..540f7cdfe 100644 --- a/packages/token-analyzer/src/moduleResolver.ts +++ b/packages/token-analyzer/src/moduleResolver.ts @@ -28,10 +28,7 @@ export const resolvedFilesCache = new Map(); /** * Creates a cache key for module resolution */ -function createCacheKey( - moduleSpecifier: string, - containingFile: string -): string { +function createCacheKey(moduleSpecifier: string, containingFile: string): string { return `${containingFile}:${moduleSpecifier}`; } @@ -66,11 +63,7 @@ function verifyFileExists(filePath: string | undefined | null): boolean { * @param containingFile The file containing the import * @returns The absolute file path or null if it can't be resolved */ -export function resolveModulePath( - project: Project, - moduleSpecifier: string, - containingFile: string -): string | null { +export function resolveModulePath(project: Project, moduleSpecifier: string, containingFile: string): string | null { const cacheKey = createCacheKey(moduleSpecifier, containingFile); // Check cache first @@ -88,9 +81,7 @@ export function resolveModulePath( const extensions = ['.ts', '.tsx', '.js', '.jsx', '.d.ts']; // Check if the module specifier already has a valid extension - const hasExtension = extensions.some((ext) => - moduleSpecifier.endsWith(ext) - ); + const hasExtension = extensions.some((ext) => moduleSpecifier.endsWith(ext)); // 1. 
If it has an extension, try the exact path first if (hasExtension) { @@ -175,11 +166,7 @@ export function getModuleSourceFile( log(`Resolving module: ${moduleSpecifier} from ${containingFile}`); // Step 1: Try to resolve the module to a file path - const resolvedPath = resolveModulePath( - project, - moduleSpecifier, - containingFile - ); + const resolvedPath = resolveModulePath(project, moduleSpecifier, containingFile); if (!resolvedPath) { log(`Could not resolve module: ${moduleSpecifier}`); return null; From ba6eb15e241b940116666e812cfe2767b95a29f5 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Thu, 29 May 2025 14:08:44 -0700 Subject: [PATCH 71/75] updating lock --- yarn.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn.lock b/yarn.lock index 938b6696b..16018f7a3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -16743,7 +16743,7 @@ ts-loader@^9.3.1: micromatch "^4.0.0" semver "^7.3.4" -ts-morph@24.0.0: +ts-morph@24.0.0, ts-morph@^24.0.0: version "24.0.0" resolved "https://registry.yarnpkg.com/ts-morph/-/ts-morph-24.0.0.tgz#6249b526ade40cf99c8803e7abdae6c65882e58e" integrity sha512-2OAOg/Ob5yx9Et7ZX4CvTCc0UFoZHwLEJ+dpDPSUi5TgwwlTlX47w+iFRrEwzUZwYACjq83cgjS/Da50Ga37uw== From 53b1fc73c5426bc2a59621e3cfae5fdb9b1e7563 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Fri, 30 May 2025 02:48:03 -0700 Subject: [PATCH 72/75] Fixing CLI behavior and ensuring it runs correctly even when hitting file system roots update default path to just run at the directory it's called from --- packages/token-analyzer/package.json | 2 +- packages/token-analyzer/src/findTsConfigPath.ts | 12 ++++++++++-- packages/token-analyzer/src/index.ts | 11 ++++++++--- 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/packages/token-analyzer/package.json b/packages/token-analyzer/package.json index 99d4a15ef..9a6286ea4 100644 --- a/packages/token-analyzer/package.json +++ b/packages/token-analyzer/package.json @@ -16,7 +16,7 @@ "test:debug": "node --loader ts-node/esm --inspect-brk node_modules/.bin/jest --runInBand" }, "bin": { - "token-analyzer": "./index.js" + "token-analyzer": "./lib-commonjs/index.js" }, "private": true } diff --git a/packages/token-analyzer/src/findTsConfigPath.ts b/packages/token-analyzer/src/findTsConfigPath.ts index 53c652ade..5fa32daeb 100644 --- a/packages/token-analyzer/src/findTsConfigPath.ts +++ b/packages/token-analyzer/src/findTsConfigPath.ts @@ -10,8 +10,16 @@ export function findTsConfigPath(startDir = __dirname): string | null { if (fs.existsSync(tsConfigPath)) { return tsConfigPath; } - // Move up to parent directory - currentDir = path.dirname(currentDir); + + // Check if we've hit the file system root dir and bail if we have and haven't found a tsconfig.json + // This prevents infinite loops in case of misconfigured paths + if (currentDir === path.dirname(currentDir)) { + console.warn(`Hit the root directory looking for tsconfig. 
Stopping search for tsconfig.json.`);
+      return null;
+    } else {
+      // Move up to parent directory
+      currentDir = path.dirname(currentDir);
+    }
   }
 
   // Check root directory as well
diff --git a/packages/token-analyzer/src/index.ts b/packages/token-analyzer/src/index.ts
index f1857d741..d076f9710 100644
--- a/packages/token-analyzer/src/index.ts
+++ b/packages/token-analyzer/src/index.ts
@@ -30,7 +30,7 @@ async function analyzeProjectStyles(
 
   const project = new Project({
     // Get the nearest tsconfig.json file so we can resolve modules and paths correctly based on the project config
-    tsConfigFilePath: findTsConfigPath(rootDir) || '',
+    tsConfigFilePath: findTsConfigPath(rootDir) ?? undefined,
     skipAddingFilesFromTsConfig: true,
     skipFileDependencyResolution: false,
   });
@@ -113,7 +113,12 @@ interface CliArgs {
 }
 
 // CLI execution
-const isRunningDirectly = process.argv[1].includes('index');
+const isRunningDirectly =
+  require.main === module || // Standard Node.js detection
+  process.argv[1].includes('token-analyzer') || // When run as global CLI
+  process.argv[1].endsWith('index.js') || // When run directly
+  process.argv[1].includes('index'); // Fallback to the original check
+
 if (isRunningDirectly) {
   const argv = yargs(hideBin(process.argv))
     .usage('$0 [options]', 'Analyze project styles and token usage')
@@ -121,7 +126,7 @@
       alias: 'r',
       describe: 'Root directory to analyze',
       type: 'string',
-      default: './src',
+      default: '.',
     })
     .option('output', {
       alias: 'o',

From 40598b8b6a9ddc3620592e3c53d0388617af6702 Mon Sep 17 00:00:00 2001
From: Brandon Thomas
Date: Fri, 6 Jun 2025 12:01:13 -0700
Subject: [PATCH 73/75] export types.

---
 packages/token-analyzer/src/index.ts | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/packages/token-analyzer/src/index.ts b/packages/token-analyzer/src/index.ts
index d076f9710..e5360a6c3 100644
--- a/packages/token-analyzer/src/index.ts
+++ b/packages/token-analyzer/src/index.ts
@@ -183,3 +183,16 @@ if (isRunningDirectly) {
 }
 
 export { analyzeProjectStyles };
+export type {
+  AnalysisResults,
+  FileAnalysis,
+  KnownTokenImportsAndModules,
+  StyleAnalysis,
+  StyleCondition,
+  StyleContent,
+  StyleMetadata,
+  StyleTokens,
+  TokenMap,
+  TokenReference,
+  TokenResolverInfo,
+} from './types';

From 642e94629413002393cb9975e72e35fa36cd623e Mon Sep 17 00:00:00 2001
From: Brandon Thomas
Date: Wed, 18 Jun 2025 16:35:06 -0700
Subject: [PATCH 74/75] update todos and json output examples.

---
 packages/token-analyzer/README.md | 153 ++++++++++++++++++++++++++++++
 1 file changed, 153 insertions(+)

diff --git a/packages/token-analyzer/README.md b/packages/token-analyzer/README.md
index ca8fd82b0..829645f17 100644
--- a/packages/token-analyzer/README.md
+++ b/packages/token-analyzer/README.md
@@ -16,6 +16,8 @@ The tool first scans for the common pattern of `styles` or `style` being in the
 - Add ability to customize glob used to find style files
 - Add ability to add known tokens
 - Read gitignore from target dir and use that for ignore if we find one. (currently hard coded).
+- Add 'thorough' or 'complete' mode that doesn't filter files based on `style` or `styles` in the name.
+-
 
 ## Installation
 
@@ -118,6 +120,157 @@ async function analyze() {
 }
 ```
 
+## Example JSON Output
+
+Below is a simplified example of the styles output that the tool might produce. Note that the `assignedVariables` field corresponds to the key name under `styleConditions`.
+ +```json +{ + "useButtonStyles.styles.ts": { + "styles": { + "useRootBaseClassName": { + "resetStyles": { + "tokens": [ + { + "property": "backgroundColor", + "token": ["tokens.colorNeutralBackground1"], + "path": ["backgroundColor"] + }, + { + "property": "color", + "token": ["semanticTokens.cornerFlyoutRest"], + "path": ["color"] + }, + { + "property": "border", + "token": ["tokens.strokeWidthThin"], + "path": ["border"] + }, + { + "property": "border", + "token": ["tokens.colorNeutralStroke1"], + "path": ["border"] + }, + { + "property": "fontFamily", + "token": ["textStyleAiHeaderFontfamily"], + "path": ["fontFamily"] + }, + { + "property": "padding", + "token": ["tokens.spacingHorizontalM"], + "path": ["padding"] + }, + { + "property": "borderRadius", + "token": ["tokens.borderRadiusMedium"], + "path": ["borderRadius"] + }, + { + "property": "fontSize", + "token": ["tokens.fontSizeBase300"], + "path": ["fontSize"] + }, + { + "property": "fontWeight", + "token": ["tokens.fontWeightSemibold"], + "path": ["fontWeight"] + }, + { + "property": "lineHeight", + "token": ["tokens.lineHeightBase300"], + "path": ["lineHeight"] + }, + { + "property": "transitionDuration", + "token": ["tokens.durationFaster"], + "path": ["transitionDuration"] + }, + { + "property": "transitionTimingFunction", + "token": ["tokens.curveEasyEase"], + "path": ["transitionTimingFunction"] + } + ], + "nested": { + "':hover'": { + "tokens": [ + { + "property": "backgroundColor", + "token": ["cornerCtrlLgHoverRaw"], + "path": ["':hover'", "backgroundColor"] + }, + { + "property": "borderColor", + "token": ["ctrlLinkForegroundBrandHover"], + "path": ["':hover'", "borderColor"] + }, + { + "property": "color", + "token": ["tokens.colorNeutralForeground1Hover"], + "path": ["':hover'", "color"] + } + ] + } + }, + "isResetStyles": true, + "assignedVariables": ["rootBaseClassName"] + } + }, + "useRootDisabledStyles": { + "base": { + "tokens": [ + { + "property": "backgroundColor", + "token": ["tokens.colorNeutralBackgroundDisabled"], + "path": ["backgroundColor"] + }, + { + "property": "borderTopColor", + "token": ["tokens.colorNeutralStrokeDisabled"], + "path": ["borderTopColor"] + }, + { + "property": "borderRightColor", + "token": ["tokens.colorNeutralStrokeDisabled"], + "path": ["borderRightColor"] + }, + { + "property": "borderBottomColor", + "token": ["tokens.colorNeutralStrokeDisabled"], + "path": ["borderBottomColor"] + }, + { + "property": "borderLeftColor", + "token": ["tokens.colorNeutralStrokeDisabled"], + "path": ["borderLeftColor"] + }, + { + "property": "color", + "token": ["tokens.colorNeutralForegroundDisabled"], + "path": ["color"] + } + ], + "assignedVariables": ["rootDisabledStyles"] + } + } + }, + "metadata": { + "styleConditions": { + "rootBaseClassName": { + "isBase": true, + "slotName": "root" + }, + "rootDisabledStyles.base": { + "conditions": ["(disabled || disabledFocusable)"], + "slotName": "root" + } + } + } + } +} +``` + ## Configuration The analyzer identifies style files based on naming conventions. 
By default, it looks for: From e022f0e20f923e4298625bc355dccdc466123a05 Mon Sep 17 00:00:00 2001 From: Brandon Thomas Date: Wed, 18 Jun 2025 16:35:23 -0700 Subject: [PATCH 75/75] remove optional from `StyleCondition` --- packages/token-analyzer/src/types.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/token-analyzer/src/types.ts b/packages/token-analyzer/src/types.ts index 50f90e371..085dc0742 100644 --- a/packages/token-analyzer/src/types.ts +++ b/packages/token-analyzer/src/types.ts @@ -26,7 +26,7 @@ export interface StyleAnalysis { export interface StyleCondition { style: string; - condition?: string; + condition: string; } export interface StyleMetadata {