Generally available as lit.utils.
With some exceptions, this includes lit.utils.React for convenience.
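For illustration only, a hedged sketch of reaching these helpers from a page where the lit global is present:
// const { NoOp, Identity, Undef } = lit.utils
// const React = lit.utils.React   // re-exported for convenience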
export const NoOp = () => {}
export const Identity = x => x
export const AsInt = x => parseInt(x, 10)
export const Undef = x => typeof x === 'undefined'
export const getMeta = (key,def) => {
if (typeof document === "undefined" || !document.querySelector) return def;
const el = document.querySelector(`meta[name="lit${key}"]`)
const val = el ? el.getAttribute('value') : def
return val
}
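// Hedged usage sketch: with a hypothetical <meta name="litTheme" value="dark"> tag in the page,
//   getMeta('Theme', 'light')   // => 'dark'; falls back to 'light' when the tag is absent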
export const posstr = pos => {
return pos ? `${pos.line}:${pos.column}-${pos.offset}` : undefined
}
export const wait = async (ms) => {
return new Promise(resolve => {
setTimeout(resolve, ms);
});
}
export const template = (templateString, templateVars) => {
// Escape backticks, then evaluate the string as a template literal,
// with templateVars bound as `this` so fields are referenced via ${this.x}.
const escaped = templateString.replace(/`/g, '\\`')
const body = "return `"+ escaped +"`;"
return new Function(body).call(templateVars)
}
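// Hedged example (not from the source): fields of templateVars are addressed via `this`
// inside the template string:
//   template('Hello ${this.name}, you have ${this.count} notes', { name: 'Ada', count: 3 })
//   // => 'Hello Ada, you have 3 notes'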
import { NoOp } from './functions'
const ROOT_NS = '.lit'
const ROOT_PREFIX = `${ROOT_NS}`
const debugKeys = (...args) => {
let debugStr = ''
if (typeof process !== 'undefined' && process.env && process.env.DEBUG) {
debugStr = process.env.DEBUG
}
if (typeof window !== 'undefined' && window.location) {
const debugKeys = localStorage.getItem('litDebug') || ''
if (debugKeys) debugStr = debugKeys
}
return debugStr.split(',')
}
// console.log("[console] log mask:", debugKeys().join(',') )
const shouldLog = ns => {
const keys = debugKeys()
if (keys.indexOf(`-${ns}`) >= 0) return false;
return keys.indexOf('*') >= 0 || keys.indexOf('All') >= 0 || keys.indexOf(ns) >= 0
}
const level = function(level, fn) {
const lvlIndent = Array(level).fill(' ').join('')
return function(...args) {
if (level <= debug_level() || shouldLog(level)) fn(`[lit]{${level}}${lvlIndent}`, ...args)
}
}
const prefixArgs = (prefix, fn, self) => {
return (...args) => {
const newArgs = [prefix, ...args]
fn.apply( self, newArgs)
}
}
const getConsole = (ns) => {
const prefix = `[${ROOT_PREFIX}:${ns}] `
return {
level: level,
log: prefixArgs(prefix, console.log, console),
dir: prefixArgs(prefix, console.dir, console),
info: prefixArgs(prefix, console.info, console),
error: prefixArgs(prefix, console.error, console),
time: console.time,
timeEnd: console.timeEnd,
getConsoleForNamespace,
}
}
export const Console = getConsole(ROOT_NS);
export function getConsoleForNamespace(ns) {
if (shouldLog(ns)) {
return getConsole(ns)
} else {
if (debugKeys()[0] !== 'None') console.log(`[${ROOT_PREFIX}] Hiding console for NS "${ns}"`)
const prefix = `[${ROOT_PREFIX}:${ns}] `
return {
level: NoOp,
log: NoOp,
dir: NoOp,
info: NoOp,
error: prefixArgs(prefix, console.error, console),
time: NoOp,
timeEnd: NoOp,
}
}
}
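// Hedged usage sketch (the namespace names are hypothetical):
//   localStorage.setItem('litDebug', 'fs,parser,-render')  // enable fs and parser, silence render
//   const log = getConsoleForNamespace('fs')
//   log.log('cache warmed')   // prefixed with "[.lit:fs]" when fs, '*' or 'All' is enabled
// On the server the same keys can come from process.env.DEBUG instead.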
function hashCode(str) {
var hash = 0;
for (var i = 0; i < str.length; i++) {
hash = str.charCodeAt(i) + ((hash << 5) - hash);
}
return hash;
}
// Convert an int to hexadecimal with a max length
// of six characters.
function intToARGB(i) {
var hex = ((i>>24)&0xFF).toString(16) +
((i>>16)&0xFF).toString(16) +
((i>>8)&0xFF).toString(16) +
(i&0xFF).toString(16);
// The string can come out shorter than six characters, so pad it
// with zeros; the substring below trims it back to exactly six.
hex += '000000';
return '#' + hex.substring(0, 6);
}
export function pickTextColorBasedOnBgColor(bgColor, lightColor, darkColor) {
var color = (bgColor.charAt(0) === '#') ? bgColor.substring(1, 7) : bgColor;
var r = parseInt(color.substring(0, 2), 16); // hexToR
var g = parseInt(color.substring(2, 4), 16); // hexToG
var b = parseInt(color.substring(4, 6), 16); // hexToB
var uicolors = [r / 255, g / 255, b / 255];
var c = uicolors.map((col) => {
if (col <= 0.03928) {
return col / 12.92;
}
return Math.pow((col + 0.055) / 1.055, 2.4);
});
var L = (0.2126 * c[0]) + (0.7152 * c[1]) + (0.0722 * c[2]);
return (L > 0.179) ? darkColor : lightColor;
}
export function stringToHex (str) {
return intToARGB(hashCode(str))
}
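// Hedged usage sketch combining the two helpers; the 0.179 cut-off above is the commonly
// used W3C relative-luminance threshold for picking readable text:
//   const bg = stringToHex('notes/todo.lit.md')                      // deterministic colour per string
//   const fg = pickTextColorBasedOnBgColor(bg, '#ffffff', '#000000')
//   // => '#ffffff' on dark backgrounds, '#000000' on light ones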
var newlineRegex = /\n/g;
export const btoa = (str) => {
if (typeof window === 'undefined' || !window.btoa) {
// const Buffer = require('buffer')
return Buffer.from(str, 'binary').toString('base64')
} else return window.btoa(str)
}
export const atob = (str) => {
if (typeof window === 'undefined' || !window.atob) {
// const Buffer = require('buffer')
return Buffer.from(str, 'base64').toString('binary')
}
else return window.atob(str)
}
export function b64EncodeUnicode(str) {
return btoa(encodeURIComponent(str).replace(/%([0-9A-F]{2})/g, function(match, p1) {
return String.fromCharCode('0x' + p1);
}));
}
export function b64DecodeUnicode(str) {
// atob on Mobile Safari for iOS 9 will throw an exception if there's a newline.
var b64Decoded = atob(str.replace(newlineRegex, ''));
var decodedWithUnicodeHexesRestored = Array.prototype.map.call(
b64Decoded,
hexEncodeCharCode
)
.join('');
return decodeURIComponent(decodedWithUnicodeHexesRestored);
}
function hexEncodeCharCode(c) {
return '%' + ('00' + c.charCodeAt(0).toString(16)).slice(-2);
}
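// Hedged round trip: unlike plain btoa, this pair survives code points outside Latin-1.
//   b64DecodeUnicode(b64EncodeUnicode('déjà vu ✓'))   // => 'déjà vu ✓'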
Unist utils
Various unist-util-* utilities used by .lit, plus some custom-built ones, exposed for convenience under lit.utils.unist.
const patchSource = (src, originalLocation, value) => {
const pos = originalLocation.position || originalLocation
if (!pos) throw Error("No location to patch")
return src.slice(0, pos.start.offset) + value + src.slice(pos.end.offset);
}
export default patchSource
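// Hedged example (the offsets are made up to match the sample string):
//   const src = '# Hello\n\nworld\n'
//   const node = { position: { start: { offset: 9 }, end: { offset: 14 } } }
//   patchSource(src, node, 'there')   // => '# Hello\n\nthere\n'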
import filter from 'unist-util-filter'
import {selectAll as select} from 'unist-util-select'
import { getConsoleForNamespace } from './console'
const console = getConsoleForNamespace('util')
export const atPos = pos => (node) => {
const pos2 = node.position
if (!pos2 || !pos2.start || !pos2.end) {
console.error("no pos", node)
return false
}
const startInside = (pos2.start.line >= pos.start.line
&& pos2.start.line <= pos.end.line)
const endInside = (pos2.end.line >= pos.start.line
&& pos2.end.line <= pos.end.line)
const wraps = pos2.start.line <= pos.start.line
&& pos2.end.line >= pos.end.line
const any = wraps || startInside || endInside
// console.log("atPos: " + node.type, any ,pos2.start.line, pos2.end.line, wraps, startInside, endInside, pos.start.line, pos.end.line)
return any
}
export const selectAll = (type, pos, tree) => {
const filteredTree = filter(tree, atPos(pos))
const nodes = select(type, filteredTree)
return nodes
}
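// Hedged call against a parsed mdast tree (the tree variable and line range are assumptions);
// only nodes whose positions overlap the given lines survive the filter:
//   const codeBlocks = selectAll('code', { start: { line: 3 }, end: { line: 12 } }, mdastTree)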
Filesystem utils
The raw [lightning-fs] object is available as lit.lfs, while the extended and customised wrapper used throughout .lit is at lit.fs.
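For orientation, a hedged sketch of the difference (the path below is illustrative):
// lit.lfs – the raw lightning-fs instance (callback API, with a .promises variant)
// lit.fs  – the extendFs() wrapper built below, adding fetch/GitHub passthrough and readStat
// const { local, remote } = await lit.fs.readStat('/index.lit.md')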
import path from "path";
import { ghWriteFile, ghReadFile, ghDeleteFile } from "../utils/fs-promises-gh-utils";
import { getConsoleForNamespace } from './console'
const console = getConsoleForNamespace('fs')
const passThroughRead = (origReadFile, litroot) => {
return async (...args) => {
console.log('fs.passThroughRead', args[0])
try {
return await origReadFile(...args);
} catch (err) {
if (args[1] && args[1].localOnly) throw err;
const filePath = path.join(litroot, args[0])
console.log('fs.passThroughRead passing through to fetch', filePath)
const resp = await fetch(filePath)
if (resp.status === 404) throw new Error(`404 File ${filePath} not found.`)
return await resp.text();
}
};
}
const passThroughReadWithStat = (origReadFile, origStat, litroot, ghOpts, noPassthrough) => {
return async (...args) => {
console.log('fs.passThroughReadWithStat', litroot, args[0])
const resp = {
local: { stat: undefined, value: undefined },
remote: { stat: undefined, value: undefined },
}
const filePath = args[0] = path.join(litroot, args[0])
try {
try {
resp.local.stat = await origStat(...args)
} catch(err){
console.log("fs.passThroughReadWithStat no stat on local file")
}
const value = await origReadFile(...args)
resp.local.value = value
} catch (err) {
console.log('fs.passThroughReadWithStat no local file', err)
}
let remoteResp
if (ghOpts) {
console.log("fs.passThroughReadWithStat passing through to GitHub", filePath)
const ghrf = ghReadFile(ghOpts)
try {
remoteResp = await ghrf(filePath)
} catch(err){
console.log("fs.passThroughReadWithStat GitHub read failed", err)
}
} else if (noPassthrough) {
return resp
} else {
console.log('fs.passThroughReadWithStat passing through to fetch', filePath)
remoteResp = await fetch(filePath)
}
if (!remoteResp || remoteResp.status < 200 || remoteResp.status >= 400) {
if (!resp.local.stat && !resp.local.value) {
console.log('fs.passThroughReadWithStat failed local and remote read')
throw new Error(`${remoteResp?.status || "Request"} Error. Fetching File.`)
}
} else {
console.log("fs.passThroughReadWithStat found remote file")
const value = await remoteResp.text()
const lastModified = remoteResp.headers && remoteResp.headers.get('last-modified')
const contentLength = remoteResp.headers && remoteResp.headers.get('content-length')
const stat = {
dev: 1,
gid: 1,
ino: 1,
uid: 1,
mtimeMs: lastModified && (new Date(lastModified)).getTime(),
size: contentLength,
}
resp.remote = {stat,value}
}
return resp
};
}
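// For reference, the shape resolved by the reader above (either side may be empty):
//   {
//     local:  { stat: <lightning-fs stat | undefined>, value: <file contents | undefined> },
//     remote: { stat: { mtimeMs, size, ... } | undefined, value: <fetched text | undefined> },
//   }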
const writeFileP = (fs, litroot) => {
const wf = fs.writeFile
return async (...args) => {
console.log("fs.writeFileP ", args[0])
const filepath = (args[0] = litroot + args[0]);
const p = path.parse(filepath);
const parts = p.dir.split(path.sep);
// console.log(`"Parts for "${filepath}"`, parts);
// Create each intermediate directory in turn (mkdir -p behaviour).
for (let i = 1; i < parts.length; i++) {
const subPath = parts.slice(0, i + 1).join(path.sep);
try {
await fs.mkdir(subPath);
} catch (err) {
// Directory probably exists already; ignore and continue.
}
}
// console.log("[fs.writeFileP] Writing file", ...args);
return wf(...args);
};
}
const passThroughWrite = (fs,litroot, ghOpts) => {
const wf = fs.writeFile
return async (...args) => {
console.log('fs.passThroughWrite')
await wf(...args);
if (ghOpts && !(args[2] && args[2].localOnly)) {
const ghwf = ghWriteFile(ghOpts);
try {
const ghResp = await ghwf(...args);
console.log("GitHub write resp", ghResp);
} catch (err) {
console.error("GitHub write threw", err);
}
}
};
}
const passThroughUnlink = (fs,litroot, ghOpts) => {
const uf = fs.unlink
return async (filepath, localOnly) => {
console.log('fs.passThroughUnlink')
let local
try {
local = await uf(filepath, localOnly)
} catch (err) {
console.log("fs.passThroughUnlink didn't unlink local file", err)
}
if (localOnly) return local;
if (ghOpts) {
const ghdf = ghDeleteFile(ghOpts);
let ghResp
try {
ghResp = await ghdf(filepath.slice(1));
console.log("GitHub delete resp", ghResp);
} catch (err) {
console.error("GitHub delete threw", err.message, err);
}
return ghResp
} else {
return local
}
};
}
export const extendFs = (fs, litroot = "", ghOpts, noPassthrough) => {
const clonedfs = {...fs}
const origReadFile = clonedfs.readFile
const origStat = clonedfs.stat
if (ghOpts) clonedfs.ghOrigin = true
if (!noPassthrough) clonedfs.readFile = passThroughRead(origReadFile,litroot);
clonedfs.writeFile = writeFileP(clonedfs, litroot);
clonedfs.readStat = passThroughReadWithStat(clonedfs.readFile, origStat, litroot, ghOpts, noPassthrough)
if(ghOpts) clonedfs.writeFile = passThroughWrite(clonedfs, litroot, ghOpts);
if(ghOpts) clonedfs.unlink = passThroughUnlink(clonedfs, litroot, ghOpts);
return clonedfs
};
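// A hedged wiring sketch; the package import, paths and GitHub options below are assumptions
// based on how the options are read in this module, not a confirmed setup:
//   import LightningFS from '@isomorphic-git/lightning-fs'
//   const pfs = new LightningFS('lit').promises
//   const fs = extendFs(pfs, '/notes', { username: 'octocat', repository: 'notes', prefix: '', token: '<token>' })
//   await fs.writeFile('/index.lit.md', '# Hello')              // local write, then GitHub PUT
//   const { local, remote } = await fs.readStat('/index.lit.md')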
import { b64EncodeUnicode, b64DecodeUnicode } from './safe-encoders'
import { getConsoleForNamespace } from './console'
import {join} from 'path'
const console = getConsoleForNamespace('fs/gh')
const getEndpoint = (opts,file) => `https://api.github.com/repos/${opts.username}/${opts.repository}/${join('contents', file)}`
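// For example (hypothetical owner and repo):
//   getEndpoint({ username: 'octocat', repository: 'notes' }, 'docs/index.md')
//   // => 'https://api.github.com/repos/octocat/notes/contents/docs/index.md'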
export const ghReadFile = opts => async (...args) => {
const file = join(opts.prefix,args[0])
const params = {
method: "GET",
headers: {
"Authorization": `token ${opts.token}`,
'Content-Type': 'application/json'
},
}
const resp = await fetch(getEndpoint(opts,file), params)
// resp.origJson = resp.json
resp.text = async () => {
console.log("ghReadFile text()...")
const data = await resp.json()
console.log("ghReadFile data", data)
const content = b64DecodeUnicode(data.content)
console.log("ghReadFile decoded")
return content
}
console.log("ghReadFile", file, resp)
return resp
}
export const ghWriteFile = (opts) => async (...args) => {
const file = join(opts.prefix,args[0])
const content = args[1].toString()
console.log("ghWriteFile", file)
const endpoint = getEndpoint(opts, file)
const resp1 = await fetch(endpoint, {
method: "GET",
headers: {
"Authorization": `token ${opts.token}`,
'Content-Type': 'application/json'
}
})
const json1 = await resp1.json()
console.log(endpoint, json1.sha ? "Exists, updating...":"Doesn't exist, creating...")
const params = {
method: "PUT",
headers: {
"Authorization": `token ${opts.token}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
sha: json1.sha,
message: opts.commitMessage || `Edited ${file}`,
content: b64EncodeUnicode(content)
})
}
console.log("ghWriteFile params", params)
let resp2;
try {
resp2 = await fetch(endpoint, params)
} catch(err) {
console.log("ghWriteFile PUT failed", err)
}
return resp2 && resp2.status
}
export const ghDeleteFile = opts => async (...args) => {
const file = join(opts.prefix,args[0])
console.log("ghDeleteFile", file)
const endpoint = getEndpoint(opts, file)
const resp1 = await fetch(endpoint, { headers: { "Authorization": `token ${opts.token}` } })
const json1 = await resp1.json()
console.log(endpoint, json1.sha ? "Exists, deleting...":"Doesn't exist")
const params = {
method: "DELETE",
headers: {
"Authorization": `token ${opts.token}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
sha: json1.sha,
message: opts.commitMessage || `Deleted ${file}`,
})
}
console.log("ghDeleteFile params", params)
let resp2;
try {
resp2 = await fetch(endpoint, params)
const json = await resp2.json()
console.log("ghDeleteFile DELETE response", resp2, json)
} catch(err) {
console.log("ghDeleteFile DELETE failed", err.message, err)
}
return resp2 && resp2.status
}
// Initially, because this runs on every change,
// a commit will mostly be for a single file
// at a time. The immediate exception is when
// a file with output files is edited, in which
// case the commit includes those files.
export const onSave = async (filename) => {
const now = (new Date()).toISOString()
const fs = lit.lfs
const dir = lit.location.root
const git = lit.git
const FILE = 0, WORKDIR = 2, STAGE = 3
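// isomorphic-git statusMatrix rows have the shape [filepath, HEAD, WORKDIR, STAGE];
// a file counts as unstaged when its workdir status differs from its stage status.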
const unstaged = row => {
return row[WORKDIR] !== row[STAGE]
}
// get/list unstaged files
const status = await git.statusMatrix({ fs,dir})
const files = status
.filter( unstaged )
.map(row => row[FILE])
// stage everything
await git.add({fs, dir, filepath: '.'})
// message
const message = `Auto commit ${filename}
at ${now} includes the following ${files.length} files:
${files.map(f=> "- " + f).join('\n')}`
// commit
const sha = await git.commit({fs, dir,
message,
author: {
name: 'dotlit',
email: 'bit@dotlit.org'
}
})
return `Committed ${sha.slice(0,6)}
${message}`
}
// return onSave(lit.location.src)
Some additional utils include lit.delete() and lit.read(); these were used during testing but will likely move.
Momento
A basic [moment.js] alternative for displaying user-friendly times. See utils/momento.
Other
return lit.utils