Tailwind CSS added

This commit is contained in:
Aravind142857
2023-06-21 15:56:08 -05:00
parent 0d2619b9c0
commit cc421f40c6
1574 changed files with 277349 additions and 177 deletions

91
node_modules/tailwindcss/src/oxide/cli/build/deps.ts generated vendored Normal file

@@ -0,0 +1,91 @@
import packageJson from '../../../../package.json'
import browserslist from 'browserslist'
import { Result } from 'postcss'
import {
// @ts-ignore
lazyPostcss,
// @ts-ignore
lazyPostcssImport,
// @ts-ignore
lazyCssnano,
// @ts-ignore
} from '../../../../peers/index'
export function lazyLightningCss() {
// TODO: Make this lazy/bundled
return require('lightningcss')
}
let lightningCss
function loadLightningCss() {
if (lightningCss) {
return lightningCss
}
// Try to load a local version first
try {
return (lightningCss = require('lightningcss'))
} catch {}
return (lightningCss = lazyLightningCss())
}
export async function lightningcss(shouldMinify: boolean, result: Result) {
let css = loadLightningCss()
try {
let transformed = css.transform({
filename: result.opts.from || 'input.css',
code: Buffer.from(result.css, 'utf-8'),
minify: shouldMinify,
sourceMap: !!result.map,
inputSourceMap: result.map ? result.map.toString() : undefined,
targets: css.browserslistToTargets(browserslist(packageJson.browserslist)),
drafts: {
nesting: true,
},
})
return Object.assign(result, {
css: transformed.code.toString('utf8'),
map: result.map
? Object.assign(result.map, {
toString() {
return transformed.map.toString()
},
})
: result.map,
})
} catch (err) {
console.error('Unable to use Lightning CSS. Using raw version instead.')
console.error(err)
return result
}
}
/**
* @returns {import('postcss')}
*/
export function loadPostcss() {
// Try to load a local `postcss` version first
try {
return require('postcss')
} catch {}
return lazyPostcss()
}
export function loadPostcssImport() {
// Try to load a local `postcss-import` version first
try {
return require('postcss-import')
} catch {}
return lazyPostcssImport()
}
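The file above applies the same local-first loading pattern three times: prefer whatever version the user has installed in their project, and only then fall back to the copy bundled with Tailwind. A minimal sketch of that idea as a generic helper (the helper name is hypothetical; `lazyPostcss` is one of the lazy loaders imported above):

// Hypothetical helper illustrating the local-first loading used in this file.
function loadLocalFirst<T>(packageName: string, fallback: () => T): T {
  try {
    // Prefer a locally installed copy so the user's version wins
    return require(packageName)
  } catch {}
  // Otherwise use the version bundled with Tailwind
  return fallback()
}

// e.g. const postcss = loadLocalFirst('postcss', lazyPostcss)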

47
node_modules/tailwindcss/src/oxide/cli/build/index.ts generated vendored Normal file

@@ -0,0 +1,47 @@
import fs from 'fs'
import path from 'path'
import { resolveDefaultConfigPath } from '../../../util/resolveConfigPath'
import { createProcessor } from './plugin'
export async function build(args) {
let input = args['--input']
let shouldWatch = args['--watch']
// TODO: Deprecate this in future versions
if (!input && args['_'][1]) {
console.error('[deprecation] Running tailwindcss without -i, please provide an input file.')
input = args['--input'] = args['_'][1]
}
if (input && input !== '-' && !fs.existsSync((input = path.resolve(input)))) {
console.error(`Specified input file ${args['--input']} does not exist.`)
process.exit(9)
}
if (args['--config'] && !fs.existsSync((args['--config'] = path.resolve(args['--config'])))) {
console.error(`Specified config file ${args['--config']} does not exist.`)
process.exit(9)
}
// TODO: Reference the @config path here if exists
let configPath = args['--config'] ? args['--config'] : resolveDefaultConfigPath()
let processor = await createProcessor(args, configPath)
if (shouldWatch) {
// Abort the watcher if stdin is closed to avoid zombie processes
// You can disable this behavior with --watch=always
if (args['--watch'] !== 'always') {
process.stdin.on('end', () => process.exit(0))
}
process.stdin.resume()
await processor.watch()
} else {
await processor.build().catch((e) => {
console.error(e)
process.exit(1)
})
}
}
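For context, `build` receives the CLI arguments after they have been parsed into a flag-keyed object; a hypothetical call (flag names taken from the code above, values assumed) might look like:

// Hypothetical invocation; the real CLI parses process.argv into this shape first.
build({
  '--input': 'src/input.css',
  '--output': 'dist/output.css',
  '--watch': false,
  _: [], // positional arguments; args['_'][1] is only used by the deprecated no-flag form
}).catch((err) => {
  console.error(err)
  process.exit(1)
})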

442
node_modules/tailwindcss/src/oxide/cli/build/plugin.ts generated vendored Normal file

@@ -0,0 +1,442 @@
import path from 'path'
import fs from 'fs'
import postcssrc from 'postcss-load-config'
import { lilconfig } from 'lilconfig'
import loadPlugins from 'postcss-load-config/src/plugins' // Little bit scary, looking at private/internal API
import loadOptions from 'postcss-load-config/src/options' // Little bit scary, looking at private/internal API
import tailwind from '../../../processTailwindFeatures'
import { loadPostcss, loadPostcssImport, lightningcss } from './deps'
import { formatNodes, drainStdin, outputFile } from './utils'
import { env } from '../../../lib/sharedState'
import resolveConfig from '../../../../resolveConfig'
import { parseCandidateFiles } from '../../../lib/content'
import { createWatcher } from './watching'
import fastGlob from 'fast-glob'
import { findAtConfigPath } from '../../../lib/findAtConfigPath'
import log from '../../../util/log'
import { loadConfig } from '../../../lib/load-config'
import getModuleDependencies from '../../../lib/getModuleDependencies'
import type { Config } from '../../../../types'
/**
*
* @param {string} [customPostCssPath]
* @returns
*/
async function loadPostCssPlugins(customPostCssPath) {
let config = customPostCssPath
? await (async () => {
let file = path.resolve(customPostCssPath)
// Implementation, see: https://unpkg.com/browse/postcss-load-config@3.1.0/src/index.js
// @ts-ignore
let { config = {} } = await lilconfig('postcss').load(file)
if (typeof config === 'function') {
config = config()
} else {
config = Object.assign({}, config)
}
if (!config.plugins) {
config.plugins = []
}
return {
file,
plugins: loadPlugins(config, file),
options: loadOptions(config, file),
}
})()
: await postcssrc()
let configPlugins = config.plugins
let configPluginTailwindIdx = configPlugins.findIndex((plugin) => {
if (typeof plugin === 'function' && plugin.name === 'tailwindcss') {
return true
}
if (typeof plugin === 'object' && plugin !== null && plugin.postcssPlugin === 'tailwindcss') {
return true
}
return false
})
let beforePlugins =
configPluginTailwindIdx === -1 ? [] : configPlugins.slice(0, configPluginTailwindIdx)
let afterPlugins =
configPluginTailwindIdx === -1
? configPlugins
: configPlugins.slice(configPluginTailwindIdx + 1)
return [beforePlugins, afterPlugins, config.options]
}
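// Note: the built-in pipeline below temporarily rewrites `@import "tailwindcss/…"` rules into
// comments before postcss-import runs (so they are not inlined from disk) and restores them
// as at-rules afterwards.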
function loadBuiltinPostcssPlugins() {
let postcss = loadPostcss()
let IMPORT_COMMENT = '__TAILWIND_RESTORE_IMPORT__: '
return [
[
(root) => {
root.walkAtRules('import', (rule) => {
if (rule.params.slice(1).startsWith('tailwindcss/')) {
rule.after(postcss.comment({ text: IMPORT_COMMENT + rule.params }))
rule.remove()
}
})
},
loadPostcssImport(),
(root) => {
root.walkComments((rule) => {
if (rule.text.startsWith(IMPORT_COMMENT)) {
rule.after(
postcss.atRule({
name: 'import',
params: rule.text.replace(IMPORT_COMMENT, ''),
})
)
rule.remove()
}
})
},
],
[],
{},
]
}
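// Note: the following module-level `state` object is a long-lived singleton shared between builds;
// it caches the Tailwind context, watcher, resolved config, and changed-content queue so that
// watch mode can reuse work across rebuilds.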
let state = {
/** @type {any} */
context: null,
/** @type {ReturnType<typeof createWatcher> | null} */
watcher: null,
/** @type {{content: string, extension: string}[]} */
changedContent: [],
/** @type {{config: Config, dependencies: Set<string>, dispose: Function } | null} */
configBag: null,
contextDependencies: new Set(),
/** @type {import('../../lib/content.js').ContentPath[]} */
contentPaths: [],
refreshContentPaths() {
this.contentPaths = parseCandidateFiles(this.context, this.context?.tailwindConfig)
},
get config() {
return this.context.tailwindConfig
},
get contentPatterns() {
return {
all: this.contentPaths.map((contentPath) => contentPath.pattern),
dynamic: this.contentPaths
.filter((contentPath) => contentPath.glob !== undefined)
.map((contentPath) => contentPath.pattern),
}
},
loadConfig(configPath, content) {
if (this.watcher && configPath) {
this.refreshConfigDependencies()
}
let config = loadConfig(configPath)
let dependencies = getModuleDependencies(configPath)
this.configBag = {
config,
dependencies,
dispose() {
for (let file of dependencies) {
delete require.cache[require.resolve(file)]
}
},
}
// @ts-ignore
this.configBag.config = resolveConfig(this.configBag.config, { content: { files: [] } })
// Override content files if `--content` has been passed explicitly
if (content?.length > 0) {
this.configBag.config.content.files = content
}
return this.configBag.config
},
refreshConfigDependencies(configPath) {
env.DEBUG && console.time('Module dependencies')
this.configBag?.dispose()
env.DEBUG && console.timeEnd('Module dependencies')
},
readContentPaths() {
let content = []
// Resolve globs from the content config
// TODO: When we make the postcss plugin async-capable this can become async
let files = fastGlob.sync(this.contentPatterns.all)
for (let file of files) {
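// __OXIDE__ appears to be a compile-time flag set when Tailwind is bundled for the oxide engine;
// in that mode only file paths are collected here (reading is presumably deferred to the engine),
// otherwise file contents are read eagerly.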
if (__OXIDE__) {
content.push({
file,
extension: path.extname(file).slice(1),
})
} else {
content.push({
content: fs.readFileSync(path.resolve(file), 'utf8'),
extension: path.extname(file).slice(1),
})
}
}
// Resolve raw content in the tailwind config
let rawContent = this.config.content.files.filter((file) => {
return file !== null && typeof file === 'object'
})
for (let { raw: htmlContent, extension = 'html' } of rawContent) {
content.push({ content: htmlContent, extension })
}
return content
},
getContext({ createContext, cliConfigPath, root, result, content }) {
if (this.context) {
this.context.changedContent = this.changedContent.splice(0)
return this.context
}
env.DEBUG && console.time('Searching for config')
let configPath = findAtConfigPath(root, result) ?? cliConfigPath
env.DEBUG && console.timeEnd('Searching for config')
env.DEBUG && console.time('Loading config')
let config = this.loadConfig(configPath, content)
env.DEBUG && console.timeEnd('Loading config')
env.DEBUG && console.time('Creating context')
this.context = createContext(config, [])
Object.assign(this.context, {
userConfigPath: configPath,
})
env.DEBUG && console.timeEnd('Creating context')
env.DEBUG && console.time('Resolving content paths')
this.refreshContentPaths()
env.DEBUG && console.timeEnd('Resolving content paths')
if (this.watcher) {
env.DEBUG && console.time('Watch new files')
this.watcher.refreshWatchedFiles()
env.DEBUG && console.timeEnd('Watch new files')
}
for (let file of this.readContentPaths()) {
this.context.changedContent.push(file)
}
return this.context
},
}
export async function createProcessor(args, cliConfigPath) {
let postcss = loadPostcss()
let input = args['--input']
let output = args['--output']
let includePostCss = args['--postcss']
let customPostCssPath = typeof args['--postcss'] === 'string' ? args['--postcss'] : undefined
let [beforePlugins, afterPlugins, postcssOptions] = includePostCss
? await loadPostCssPlugins(customPostCssPath)
: loadBuiltinPostcssPlugins()
if (args['--purge']) {
log.warn('purge-flag-deprecated', [
'The `--purge` flag has been deprecated.',
'Please use `--content` instead.',
])
if (!args['--content']) {
args['--content'] = args['--purge']
}
}
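// Split --content on commas, but not on commas inside an unclosed brace group,
// so glob patterns like `src/**/*.{html,js}` stay intact (hence the negative lookbehind).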
let content = args['--content']?.split(/(?<!{[^}]+),/) ?? []
let tailwindPlugin = () => {
return {
postcssPlugin: 'tailwindcss',
Once(root, { result }) {
env.DEBUG && console.time('Compiling CSS')
tailwind(({ createContext }) => {
console.error()
console.error('Rebuilding...')
return () => {
return state.getContext({
createContext,
cliConfigPath,
root,
result,
content,
})
}
})(root, result)
env.DEBUG && console.timeEnd('Compiling CSS')
},
}
}
tailwindPlugin.postcss = true
let plugins = [
...beforePlugins,
tailwindPlugin,
!args['--minify'] && formatNodes,
...afterPlugins,
].filter(Boolean)
/** @type {import('postcss').Processor} */
// @ts-ignore
let processor = postcss(plugins)
async function readInput() {
// Piping in data, let's drain the stdin
if (input === '-') {
return drainStdin()
}
// Input file has been provided
if (input) {
return fs.promises.readFile(path.resolve(input), 'utf8')
}
// No input file provided, fall back to the default at-rules
return '@tailwind base; @tailwind components; @tailwind utilities'
}
async function build() {
let start = process.hrtime.bigint()
return readInput()
.then((css) => processor.process(css, { ...postcssOptions, from: input, to: output }))
.then((result) => lightningcss(!!args['--minify'], result))
.then((result) => {
if (!state.watcher) {
return result
}
env.DEBUG && console.time('Recording PostCSS dependencies')
for (let message of result.messages) {
if (message.type === 'dependency') {
state.contextDependencies.add(message.file)
}
}
env.DEBUG && console.timeEnd('Recording PostCSS dependencies')
// TODO: This needs to be in a different spot
env.DEBUG && console.time('Watch new files')
state.watcher.refreshWatchedFiles()
env.DEBUG && console.timeEnd('Watch new files')
return result
})
.then((result) => {
if (!output) {
process.stdout.write(result.css)
return
}
return Promise.all([
outputFile(result.opts.to, result.css),
result.map && outputFile(result.opts.to + '.map', result.map.toString()),
])
})
.then(() => {
let end = process.hrtime.bigint()
console.error()
console.error('Done in', (end - start) / BigInt(1e6) + 'ms.')
})
.then(
() => {},
(err) => {
// TODO: If an initial build fails we can't easily pick up any PostCSS dependencies
// that were collected before the error occurred
// The result is not stored on the error so we have to store it externally
// and pull the messages off of it here somehow
// This results in a less than ideal DX because the watcher will not pick up
// changes to imported CSS if one of them caused an error during the initial build
// If you fix the error and then save the main CSS file so it compiles cleanly,
// the watcher will start watching the imported CSS files and will be
// resilient to future errors.
if (state.watcher) {
console.error(err)
} else {
return Promise.reject(err)
}
}
)
}
/**
* @param {{file: string, content(): Promise<string>, extension: string}[]} changes
*/
async function parseChanges(changes) {
return Promise.all(
changes.map(async (change) => ({
content: await change.content(),
extension: change.extension,
}))
)
}
if (input !== undefined && input !== '-') {
state.contextDependencies.add(path.resolve(input))
}
return {
build,
watch: async () => {
state.watcher = createWatcher(args, {
state,
/**
* @param {{file: string, content(): Promise<string>, extension: string}[]} changes
*/
async rebuild(changes) {
let needsNewContext = changes.some((change) => {
return (
state.configBag?.dependencies.has(change.file) ||
state.contextDependencies.has(change.file)
)
})
if (needsNewContext) {
state.context = null
} else {
for (let change of await parseChanges(changes)) {
state.changedContent.push(change)
}
}
return build()
},
})
await build()
},
}
}
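Putting it together, a sketch of driving this module directly, under the assumption that the caller resolves the config path the way `index.ts` does:

// Hypothetical driver, mirroring what build() in index.ts does with already-parsed args.
async function runOnce(configPath: string) {
  const args = { '--input': 'src/input.css', '--output': 'dist/output.css', '--minify': true }
  const processor = await createProcessor(args, configPath)
  await processor.build() // or `await processor.watch()` to keep rebuilding on file changes
}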

74
node_modules/tailwindcss/src/oxide/cli/build/utils.ts generated vendored Normal file

@@ -0,0 +1,74 @@
import fs from 'fs'
import path from 'path'
export function indentRecursive(node, indent = 0) {
node.each &&
node.each((child, i) => {
if (!child.raws.before || !child.raws.before.trim() || child.raws.before.includes('\n')) {
child.raws.before = `\n${node.type !== 'rule' && i > 0 ? '\n' : ''}${' '.repeat(indent)}`
}
child.raws.after = `\n${' '.repeat(indent)}`
indentRecursive(child, indent + 1)
})
}
export function formatNodes(root) {
indentRecursive(root)
if (root.first) {
root.first.raws.before = ''
}
}
/**
* When rapidly saving files atomically a couple of situations can happen:
* - The file is missing since the external program has deleted it by the time we've gotten around to reading it from the earlier save.
* - The file is being written to by the external program by the time we're going to read it and is thus treated as busy because a lock is held.
*
* To work around this we retry reading the file a handful of times with a delay between each attempt
*
* @param {string} path
* @param {number} tries
* @returns {Promise<string | undefined>}
* @throws {Error} If the file is still missing or busy after the specified number of tries
*/
export async function readFileWithRetries(path, tries = 5) {
for (let n = 0; n <= tries; n++) {
try {
return await fs.promises.readFile(path, 'utf8')
} catch (err) {
if (n !== tries) {
if (err.code === 'ENOENT' || err.code === 'EBUSY') {
await new Promise((resolve) => setTimeout(resolve, 10))
continue
}
}
throw err
}
}
}
export function drainStdin() {
return new Promise((resolve, reject) => {
let result = ''
process.stdin.on('data', (chunk) => {
result += chunk
})
process.stdin.on('end', () => resolve(result))
process.stdin.on('error', (err) => reject(err))
})
}
export async function outputFile(file, newContents) {
try {
let currentContents = await fs.promises.readFile(file, 'utf8')
if (currentContents === newContents) {
return // Skip writing the file
}
} catch {}
// Write the file
await fs.promises.mkdir(path.dirname(file), { recursive: true })
await fs.promises.writeFile(file, newContents, 'utf8')
}
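A small example of how the two file helpers above compose (the function and paths are hypothetical):

// Hypothetical: copy a CSS file while tolerating a concurrent atomic save.
async function copyCss(src: string, dest: string) {
  const css = await readFileWithRetries(src) // retries briefly on ENOENT/EBUSY
  if (css !== undefined) {
    await outputFile(dest, css) // creates the directory and skips the write if unchanged
  }
}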

225
node_modules/tailwindcss/src/oxide/cli/build/watching.ts generated vendored Normal file

@@ -0,0 +1,225 @@
import chokidar from 'chokidar'
import fs from 'fs'
import micromatch from 'micromatch'
import normalizePath from 'normalize-path'
import path from 'path'
import { readFileWithRetries } from './utils'
/**
* The core idea of this watcher is:
* 1. Whenever a file is added, changed, or renamed we queue a rebuild
* 2. Perform as few rebuilds as possible by batching them together
* 3. Coalesce events that happen in quick succession to avoid unnecessary rebuilds
* 4. Ensure another rebuild happens _if_ changed while a rebuild is in progress
*/
/**
*
* @param {*} args
* @param {{ state, rebuild(changedFiles: any[]): Promise<any> }} param1
* @returns {{
* fswatcher: import('chokidar').FSWatcher,
* refreshWatchedFiles(): void,
* }}
*/
export function createWatcher(args, { state, rebuild }) {
let shouldPoll = args['--poll']
let shouldCoalesceWriteEvents = shouldPoll || process.platform === 'win32'
// Polling interval in milliseconds
// Used only when polling or coalescing add/change events on Windows
let pollInterval = 10
let watcher = chokidar.watch([], {
// Force checking for atomic writes in all situations
// This causes chokidar to wait up to 100ms for a file to be re-added after it has been unlinked
// This only works when watching directories though
atomic: true,
usePolling: shouldPoll,
interval: shouldPoll ? pollInterval : undefined,
ignoreInitial: true,
awaitWriteFinish: shouldCoalesceWriteEvents
? {
stabilityThreshold: 50,
pollInterval: pollInterval,
}
: false,
})
// A queue of rebuilds, file reads, etc… to run
let chain = Promise.resolve()
/**
* A list of files that have been changed since the last rebuild
*
* @type {{file: string, content: () => Promise<string>, extension: string}[]}
*/
let changedContent = []
/**
* A list of files for which a rebuild has already been queued.
* This is used to prevent duplicate rebuilds when multiple events are fired for the same file.
* The rebuilt file is cleared from this list when its associated rebuild has _started_ (not when it finishes),
* so that a file changed while a rebuild is in progress still triggers another rebuild.
**/
let pendingRebuilds = new Set()
let _timer
let _reject
/**
* Rebuilds the changed files and resolves when the rebuild is
* complete regardless of whether it was successful or not
*/
async function rebuildAndContinue() {
let changes = changedContent.splice(0)
// There are no changes to rebuild so we can just do nothing
if (changes.length === 0) {
return Promise.resolve()
}
// Clear all pending rebuilds for the about-to-be-built files
changes.forEach((change) => pendingRebuilds.delete(change.file))
// Resolve the promise even when the rebuild fails
return rebuild(changes).then(
() => {},
() => {}
)
}
/**
*
* @param {*} file
* @param {(() => Promise<string>) | null} content
* @param {boolean} skipPendingCheck
* @returns {Promise<void>}
*/
function recordChangedFile(file, content = null, skipPendingCheck = false) {
file = path.resolve(file)
// Applications like Vim/Neovim fire both rename and change events in succession for atomic writes
// In that case the rebuild has already been queued by the rename event, so the change event can be skipped
if (pendingRebuilds.has(file) && !skipPendingCheck) {
return Promise.resolve()
}
// Mark that a rebuild of this file is going to happen
// It MUST happen synchronously before the rebuild is queued for this to be effective
pendingRebuilds.add(file)
changedContent.push({
file,
content: content ?? (() => fs.promises.readFile(file, 'utf8')),
extension: path.extname(file).slice(1),
})
if (_timer) {
clearTimeout(_timer)
_reject()
}
// If a rebuild is already in progress we don't want to start another one until the 10ms timer has expired
chain = chain.then(
() =>
new Promise((resolve, reject) => {
_timer = setTimeout(resolve, 10)
_reject = reject
})
)
// Resolves once this file has been rebuilt (or the rebuild for this file has failed)
// This queues as many rebuilds as there are changed files
// But those rebuilds happen after some delay
// And will immediately resolve if there are no changes
chain = chain.then(rebuildAndContinue, rebuildAndContinue)
return chain
}
watcher.on('change', (file) => recordChangedFile(file))
watcher.on('add', (file) => recordChangedFile(file))
// Restore watching any files that are "removed"
// This can happen when a file is pseudo-atomically replaced (a copy is created, overwritten, the old one is unlinked, and the new one is renamed)
// TODO: As an optimization we should allow removal when the config changes
watcher.on('unlink', (file) => {
file = normalizePath(file)
// Only re-add the file if it's not covered by a dynamic pattern
if (!micromatch.some([file], state.contentPatterns.dynamic)) {
watcher.add(file)
}
})
// Some applications such as Visual Studio (but not VS Code)
// will only fire a rename event for atomic writes and not a change event
// This is very likely a chokidar bug but it's one we need to work around
// We treat this as a change event and rebuild the CSS
watcher.on('raw', (evt, filePath, meta) => {
if (evt !== 'rename') {
return
}
let watchedPath = meta.watchedPath
// Watched path might be the file itself
// Or the directory it is in
filePath = watchedPath.endsWith(filePath) ? watchedPath : path.join(watchedPath, filePath)
// Skip this event since the file it is for does not match any of the registered content globs
if (!micromatch.some([filePath], state.contentPatterns.all)) {
return
}
// Skip since we've already queued a rebuild for this file that hasn't happened yet
if (pendingRebuilds.has(filePath)) {
return
}
// We'll go ahead and add the file to the pending rebuilds list here
// It'll be removed when the rebuild starts unless the read fails
// which will be taken care of as well
pendingRebuilds.add(filePath)
async function enqueue() {
try {
// We need to read the file as early as possible outside of the chain
// because it may be gone by the time we get to it. Doing the read
// immediately increases the chance that the file is still there
let content = await readFileWithRetries(path.resolve(filePath))
if (content === undefined) {
return
}
// This will push the rebuild onto the chain
// We MUST skip the rebuild check here otherwise the rebuild will never happen on Linux
// This is because the order of events and timing is different on Linux
// @ts-ignore: TypeScript isn't picking up that content is a string here
await recordChangedFile(filePath, () => content, true)
} catch {
// If reading the file fails, it was probably a deleted temporary file
// So we can ignore it and no rebuild is needed
}
}
enqueue().then(() => {
// If the file read fails we still need to make sure the file isn't stuck in the pending rebuilds list
pendingRebuilds.delete(filePath)
})
})
return {
fswatcher: watcher,
refreshWatchedFiles() {
watcher.add(Array.from(state.contextDependencies))
watcher.add(Array.from(state.configBag.dependencies))
watcher.add(state.contentPatterns.all)
},
}
}
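Finally, a hedged sketch of how `createProcessor` in plugin.ts wires this watcher up; `args`, `state`, and `build` stand in for the objects defined there:

// Hypothetical wiring, mirroring createProcessor().watch() in plugin.ts.
const watcher = createWatcher(args, {
  state,
  async rebuild(changes) {
    // changes: [{ file, content(): Promise<string>, extension }]
    for (const change of changes) {
      state.changedContent.push({ content: await change.content(), extension: change.extension })
    }
    return build()
  },
})
watcher.refreshWatchedFiles() // start watching config dependencies and content globs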