mirror of
https://github.com/johndoe6345789/low-code-react-app-b.git
synced 2026-04-25 06:04:54 +00:00
Merge pull request #11 from johndoe6345789/copilot/fix-missing-module-declarations
Implement @github/spark package source and build pipeline
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -13,6 +13,9 @@ dist-ssr
|
||||
*-dist
|
||||
*.local
|
||||
|
||||
# Exception: Include dist folder for workspace packages
|
||||
!packages/*/dist
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
|
||||
3660
package-lock.json
generated
3660
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
12
packages/spark-tools/dist/hooks/index.d.ts
vendored
Normal file
12
packages/spark-tools/dist/hooks/index.d.ts
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
/**
|
||||
* useKV Hook - Persistent key-value storage with localStorage and window.spark.kv integration
|
||||
*
|
||||
* This hook provides persistent state management that syncs with localStorage
|
||||
* and integrates with the Spark KV storage system if available.
|
||||
*
|
||||
* @param key - Storage key
|
||||
* @param defaultValue - Default value if key doesn't exist
|
||||
* @returns Tuple of [value, setValue, deleteValue]
|
||||
*/
|
||||
export declare function useKV<T>(key: string, defaultValue: T): [T, (value: T | ((prev: T) => T)) => void, () => void];
|
||||
//# sourceMappingURL=index.d.ts.map
|
||||
1
packages/spark-tools/dist/hooks/index.d.ts.map
vendored
Normal file
1
packages/spark-tools/dist/hooks/index.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/hooks/index.ts"],"names":[],"mappings":"AAEA;;;;;;;;;GASG;AACH,wBAAgB,KAAK,CAAC,CAAC,EACrB,GAAG,EAAE,MAAM,EACX,YAAY,EAAE,CAAC,GACd,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,IAAI,EAAE,MAAM,IAAI,CAAC,CA8ExD"}
|
||||
8
packages/spark-tools/dist/index.d.ts
vendored
Normal file
8
packages/spark-tools/dist/index.d.ts
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/**
|
||||
* @github/spark - Main Hooks Entry Point
|
||||
*
|
||||
* This is the entry point for the hooks exports from the Spark package.
|
||||
*/
|
||||
export { useKV } from './hooks/index';
|
||||
export { sparkRuntime } from './lib/spark-runtime';
|
||||
//# sourceMappingURL=index.d.ts.map
|
||||
1
packages/spark-tools/dist/index.d.ts.map
vendored
Normal file
1
packages/spark-tools/dist/index.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAA;AACrC,OAAO,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAA"}
|
||||
2
packages/spark-tools/dist/index.js
vendored
Normal file
2
packages/spark-tools/dist/index.js
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import{useState as r,useCallback as e,useEffect as o}from"react";export{s as sparkRuntime}from"./spark-runtime-wNXbhm34.js";function t(t,n){const[s,a]=r(()=>{try{if("undefined"!=typeof window&&window.spark?.kv){const r=window.spark.kv.get(t);if(void 0!==r)return r}const r=localStorage.getItem(t);return r?JSON.parse(r):n}catch(r){return console.error("Error reading from storage:",r),n}}),i=e(r=>{try{a(e=>{const o="function"==typeof r?r(e):r;return localStorage.setItem(t,JSON.stringify(o)),"undefined"!=typeof window&&window.spark?.kv&&window.spark.kv.set(t,o),o})}catch(r){console.error("Error writing to storage:",r)}},[t]),w=e(()=>{try{localStorage.removeItem(t),"undefined"!=typeof window&&window.spark?.kv&&window.spark.kv.delete(t),a(n)}catch(r){console.error("Error deleting from storage:",r)}},[t,n]);return o(()=>{const r=r=>{if(r.key===t&&null!==r.newValue)try{a(JSON.parse(r.newValue))}catch(r){console.error("Error parsing storage event:",r)}};return window.addEventListener("storage",r),()=>window.removeEventListener("storage",r)},[t]),[s,i,w]}export{t as useKV};
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
packages/spark-tools/dist/index.js.map
vendored
Normal file
1
packages/spark-tools/dist/index.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.js","sources":["../src/hooks/index.ts"],"sourcesContent":["import { useState, useEffect, useCallback } from 'react'\n\n/**\n * useKV Hook - Persistent key-value storage with localStorage and window.spark.kv integration\n * \n * This hook provides persistent state management that syncs with localStorage\n * and integrates with the Spark KV storage system if available.\n * \n * @param key - Storage key\n * @param defaultValue - Default value if key doesn't exist\n * @returns Tuple of [value, setValue, deleteValue]\n */\nexport function useKV<T>(\n key: string,\n defaultValue: T\n): [T, (value: T | ((prev: T) => T)) => void, () => void] {\n // Initialize state from localStorage or default value\n const [value, setValueInternal] = useState<T>(() => {\n try {\n // Try to get from window.spark.kv first\n if (typeof window !== 'undefined' && window.spark?.kv) {\n const sparkValue = window.spark.kv.get(key)\n if (sparkValue !== undefined) {\n return sparkValue as T\n }\n }\n\n // Fallback to localStorage\n const item = localStorage.getItem(key)\n return item ? JSON.parse(item) : defaultValue\n } catch (error) {\n console.error('Error reading from storage:', error)\n return defaultValue\n }\n })\n\n // Set value and sync to storage\n const setValue = useCallback(\n (newValue: T | ((prev: T) => T)) => {\n try {\n setValueInternal((prevValue) => {\n const valueToStore =\n typeof newValue === 'function'\n ? 
(newValue as (prev: T) => T)(prevValue)\n : newValue\n\n // Store in localStorage\n localStorage.setItem(key, JSON.stringify(valueToStore))\n\n // Store in window.spark.kv if available\n if (typeof window !== 'undefined' && window.spark?.kv) {\n window.spark.kv.set(key, valueToStore)\n }\n\n return valueToStore\n })\n } catch (error) {\n console.error('Error writing to storage:', error)\n }\n },\n [key]\n )\n\n // Delete value from storage\n const deleteValue = useCallback(() => {\n try {\n localStorage.removeItem(key)\n if (typeof window !== 'undefined' && window.spark?.kv) {\n window.spark.kv.delete(key)\n }\n setValueInternal(defaultValue)\n } catch (error) {\n console.error('Error deleting from storage:', error)\n }\n }, [key, defaultValue])\n\n // Sync with localStorage changes from other tabs\n useEffect(() => {\n const handleStorageChange = (e: StorageEvent) => {\n if (e.key === key && e.newValue !== null) {\n try {\n setValueInternal(JSON.parse(e.newValue))\n } catch (error) {\n console.error('Error parsing storage event:', error)\n }\n }\n }\n\n window.addEventListener('storage', handleStorageChange)\n return () => window.removeEventListener('storage', handleStorageChange)\n }, [key])\n\n return [value, setValue, 
deleteValue]\n}\n"],"names":["useKV","key","defaultValue","value","setValueInternal","useState","window","spark","kv","sparkValue","get","undefined","item","localStorage","getItem","JSON","parse","error","console","setValue","useCallback","newValue","prevValue","valueToStore","setItem","stringify","set","deleteValue","removeItem","delete","useEffect","handleStorageChange","e","addEventListener","removeEventListener"],"mappings":"4HAYM,SAAUA,EACdC,EACAC,GAGA,MAAOC,EAAOC,GAAoBC,EAAY,KAC5C,IAEE,GAAsB,oBAAXC,QAA0BA,OAAOC,OAAOC,GAAI,CACrD,MAAMC,EAAaH,OAAOC,MAAMC,GAAGE,IAAIT,GACvC,QAAmBU,IAAfF,EACF,OAAOA,CAEX,CAGA,MAAMG,EAAOC,aAAaC,QAAQb,GAClC,OAAOW,EAAOG,KAAKC,MAAMJ,GAAQV,CACnC,CAAE,MAAOe,GAEP,OADAC,QAAQD,MAAM,8BAA+BA,GACtCf,CACT,IAIIiB,EAAWC,EACdC,IACC,IACEjB,EAAkBkB,IAChB,MAAMC,EACgB,mBAAbF,EACFA,EAA4BC,GAC7BD,EAUN,OAPAR,aAAaW,QAAQvB,EAAKc,KAAKU,UAAUF,IAGnB,oBAAXjB,QAA0BA,OAAOC,OAAOC,IACjDF,OAAOC,MAAMC,GAAGkB,IAAIzB,EAAKsB,GAGpBA,GAEX,CAAE,MAAON,GACPC,QAAQD,MAAM,4BAA6BA,EAC7C,GAEF,CAAChB,IAIG0B,EAAcP,EAAY,KAC9B,IACEP,aAAae,WAAW3B,GACF,oBAAXK,QAA0BA,OAAOC,OAAOC,IACjDF,OAAOC,MAAMC,GAAGqB,OAAO5B,GAEzBG,EAAiBF,EACnB,CAAE,MAAOe,GACPC,QAAQD,MAAM,+BAAgCA,EAChD,GACC,CAAChB,EAAKC,IAkBT,OAfA4B,EAAU,KACR,MAAMC,EAAuBC,IAC3B,GAAIA,EAAE/B,MAAQA,GAAsB,OAAf+B,EAAEX,SACrB,IACEjB,EAAiBW,KAAKC,MAAMgB,EAAEX,UAChC,CAAE,MAAOJ,GACPC,QAAQD,MAAM,+BAAgCA,EAChD,GAKJ,OADAX,OAAO2B,iBAAiB,UAAWF,GAC5B,IAAMzB,OAAO4B,oBAAoB,UAAWH,IAClD,CAAC9B,IAEG,CAACE,EAAOgB,EAAUQ,EAC3B"}
|
||||
36
packages/spark-tools/dist/lib/spark-runtime.d.ts
vendored
Normal file
36
packages/spark-tools/dist/lib/spark-runtime.d.ts
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
/**
|
||||
* Spark Runtime - Core runtime services for Spark applications
|
||||
*
|
||||
* This module provides mock implementations of Spark services including:
|
||||
* - KV storage (key-value store)
|
||||
* - LLM service (language model integration)
|
||||
* - User authentication
|
||||
*/
|
||||
interface LLMChatResponse {
|
||||
role: string;
|
||||
content: string;
|
||||
}
|
||||
export declare const sparkRuntime: {
|
||||
kv: {
|
||||
get: <T = any>(key: string) => T | undefined;
|
||||
set: (key: string, value: any) => void;
|
||||
delete: (key: string) => void;
|
||||
clear: () => void;
|
||||
keys: () => string[];
|
||||
};
|
||||
llm: {
|
||||
(prompt: string, model?: string, jsonMode?: boolean): Promise<string>;
|
||||
chat(messages: any[]): Promise<LLMChatResponse>;
|
||||
complete(prompt: string): Promise<string>;
|
||||
};
|
||||
user: {
|
||||
getCurrentUser: () => {
|
||||
id: string;
|
||||
name: string;
|
||||
email: string;
|
||||
};
|
||||
isAuthenticated: () => boolean;
|
||||
};
|
||||
};
|
||||
export {};
|
||||
//# sourceMappingURL=spark-runtime.d.ts.map
|
||||
1
packages/spark-tools/dist/lib/spark-runtime.d.ts.map
vendored
Normal file
1
packages/spark-tools/dist/lib/spark-runtime.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"spark-runtime.d.ts","sourceRoot":"","sources":["../../src/lib/spark-runtime.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAGH,UAAU,eAAe;IACvB,IAAI,EAAE,MAAM,CAAA;IACZ,OAAO,EAAE,MAAM,CAAA;CAChB;AAwBD,eAAO,MAAM,YAAY;;cAEf,CAAC,aAAa,MAAM,KAAG,CAAC,GAAG,SAAS;mBAa/B,MAAM,SAAS,GAAG;sBAQf,MAAM;;;;;iBAzCW,MAAM,UAAU,MAAM,aAAa,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;uBAK3D,GAAG,EAAE,GAAG,OAAO,CAAC,eAAe,CAAC;yBAQ9B,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;;;;;;;;;;CA4D7D,CAAA"}
|
||||
14
packages/spark-tools/dist/lib/spark.d.ts
vendored
Normal file
14
packages/spark-tools/dist/lib/spark.d.ts
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
/**
|
||||
* Spark Initialization Module
|
||||
*
|
||||
* This module initializes the Spark runtime and makes it available globally
|
||||
* via window.spark. It should be imported early in the application lifecycle.
|
||||
*/
|
||||
import { sparkRuntime } from './spark-runtime';
|
||||
declare global {
|
||||
interface Window {
|
||||
spark: typeof sparkRuntime;
|
||||
}
|
||||
}
|
||||
export default sparkRuntime;
|
||||
//# sourceMappingURL=spark.d.ts.map
|
||||
1
packages/spark-tools/dist/lib/spark.d.ts.map
vendored
Normal file
1
packages/spark-tools/dist/lib/spark.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"spark.d.ts","sourceRoot":"","sources":["../../src/lib/spark.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAA;AAG9C,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,MAAM;QACd,KAAK,EAAE,OAAO,YAAY,CAAA;KAC3B;CACF;AAOD,eAAe,YAAY,CAAA"}
|
||||
2
packages/spark-tools/dist/spark-runtime-wNXbhm34.js
vendored
Normal file
2
packages/spark-tools/dist/spark-runtime-wNXbhm34.js
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
const e=new Map,o=async(e,o,r)=>(console.log("Mock LLM called with prompt:",e,"model:",o,"jsonMode:",r),"This is a mock response from the Spark LLM service.");o.chat=async e=>(console.log("Mock LLM chat called with messages:",e),{role:"assistant",content:"This is a mock response from the Spark LLM service."}),o.complete=async e=>(console.log("Mock LLM complete called with prompt:",e),"This is a mock completion from the Spark LLM service.");const r={kv:{get:o=>{try{const r=e.get(o);if(void 0!==r)return r;const t=localStorage.getItem(o);return t?JSON.parse(t):void 0}catch(e){return void console.error("Error getting KV value:",e)}},set:(o,r)=>{try{e.set(o,r),localStorage.setItem(o,JSON.stringify(r))}catch(e){console.error("Error setting KV value:",e)}},delete:o=>{try{e.delete(o),localStorage.removeItem(o)}catch(e){console.error("Error deleting KV value:",e)}},clear:()=>{try{const o=Array.from(e.keys());e.clear(),o.forEach(e=>localStorage.removeItem(e))}catch(e){console.error("Error clearing KV storage:",e)}},keys:()=>Array.from(e.keys())},llm:o,user:{getCurrentUser:()=>({id:"mock-user-id",name:"Mock User",email:"mock@example.com"}),isAuthenticated:()=>!0}};export{r as s};
|
||||
//# sourceMappingURL=spark-runtime-wNXbhm34.js.map
|
||||
1
packages/spark-tools/dist/spark-runtime-wNXbhm34.js.map
vendored
Normal file
1
packages/spark-tools/dist/spark-runtime-wNXbhm34.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"spark-runtime-wNXbhm34.js","sources":["../src/lib/spark-runtime.ts"],"sourcesContent":["/**\n * Spark Runtime - Core runtime services for Spark applications\n * \n * This module provides mock implementations of Spark services including:\n * - KV storage (key-value store)\n * - LLM service (language model integration)\n * - User authentication\n */\n\n// Type definitions for LLM responses\ninterface LLMChatResponse {\n role: string\n content: string\n}\n\n// Mock KV Storage\nconst kvStorage = new Map<string, any>()\n\n// Create llm function with additional properties\nconst llmFunction = async (prompt: string, model?: string, jsonMode?: boolean): Promise<string> => {\n console.log('Mock LLM called with prompt:', prompt, 'model:', model, 'jsonMode:', jsonMode)\n return 'This is a mock response from the Spark LLM service.'\n}\n\nllmFunction.chat = async (messages: any[]): Promise<LLMChatResponse> => {\n console.log('Mock LLM chat called with messages:', messages)\n return {\n role: 'assistant',\n content: 'This is a mock response from the Spark LLM service.'\n }\n}\n\nllmFunction.complete = async (prompt: string): Promise<string> => {\n console.log('Mock LLM complete called with prompt:', prompt)\n return 'This is a mock completion from the Spark LLM service.'\n}\n\nexport const sparkRuntime = {\n kv: {\n get: <T = any>(key: string): T | undefined => {\n try {\n const value = kvStorage.get(key)\n if (value !== undefined) {\n return value as T\n }\n const stored = localStorage.getItem(key)\n return stored ? 
JSON.parse(stored) : undefined\n } catch (error) {\n console.error('Error getting KV value:', error)\n return undefined\n }\n },\n set: (key: string, value: any) => {\n try {\n kvStorage.set(key, value)\n localStorage.setItem(key, JSON.stringify(value))\n } catch (error) {\n console.error('Error setting KV value:', error)\n }\n },\n delete: (key: string) => {\n try {\n kvStorage.delete(key)\n localStorage.removeItem(key)\n } catch (error) {\n console.error('Error deleting KV value:', error)\n }\n },\n clear: () => {\n try {\n // Get keys before clearing\n const keysToRemove = Array.from(kvStorage.keys())\n kvStorage.clear()\n // Clear corresponding keys from localStorage\n keysToRemove.forEach(key => localStorage.removeItem(key))\n } catch (error) {\n console.error('Error clearing KV storage:', error)\n }\n },\n keys: () => Array.from(kvStorage.keys())\n },\n \n llm: llmFunction,\n \n user: {\n getCurrentUser: () => ({\n id: 'mock-user-id',\n name: 'Mock User',\n email: 'mock@example.com'\n }),\n isAuthenticated: () => true\n 
}\n}\n"],"names":["kvStorage","Map","llmFunction","async","prompt","model","jsonMode","console","log","chat","messages","role","content","complete","sparkRuntime","kv","get","key","value","undefined","stored","localStorage","getItem","JSON","parse","error","set","setItem","stringify","delete","removeItem","clear","keysToRemove","Array","from","keys","forEach","llm","user","getCurrentUser","id","name","email","isAuthenticated"],"mappings":"AAgBA,MAAMA,EAAY,IAAIC,IAGhBC,EAAcC,MAAOC,EAAgBC,EAAgBC,KACzDC,QAAQC,IAAI,+BAAgCJ,EAAQ,SAAUC,EAAO,YAAaC,GAC3E,uDAGTJ,EAAYO,KAAON,MAAOO,IACxBH,QAAQC,IAAI,sCAAuCE,GAC5C,CACLC,KAAM,YACNC,QAAS,wDAIbV,EAAYW,SAAWV,MAAOC,IAC5BG,QAAQC,IAAI,wCAAyCJ,GAC9C,yDAGF,MAAMU,EAAe,CAC1BC,GAAI,CACFC,IAAeC,IACb,IACE,MAAMC,EAAQlB,EAAUgB,IAAIC,GAC5B,QAAcE,IAAVD,EACF,OAAOA,EAET,MAAME,EAASC,aAAaC,QAAQL,GACpC,OAAOG,EAASG,KAAKC,MAAMJ,QAAUD,CACvC,CAAE,MAAOM,GAEP,YADAlB,QAAQkB,MAAM,0BAA2BA,EAE3C,GAEFC,IAAK,CAACT,EAAaC,KACjB,IACElB,EAAU0B,IAAIT,EAAKC,GACnBG,aAAaM,QAAQV,EAAKM,KAAKK,UAAUV,GAC3C,CAAE,MAAOO,GACPlB,QAAQkB,MAAM,0BAA2BA,EAC3C,GAEFI,OAASZ,IACP,IACEjB,EAAU6B,OAAOZ,GACjBI,aAAaS,WAAWb,EAC1B,CAAE,MAAOQ,GACPlB,QAAQkB,MAAM,2BAA4BA,EAC5C,GAEFM,MAAO,KACL,IAEE,MAAMC,EAAeC,MAAMC,KAAKlC,EAAUmC,QAC1CnC,EAAU+B,QAEVC,EAAaI,QAAQnB,GAAOI,aAAaS,WAAWb,GACtD,CAAE,MAAOQ,GACPlB,QAAQkB,MAAM,6BAA8BA,EAC9C,GAEFU,KAAM,IAAMF,MAAMC,KAAKlC,EAAUmC,SAGnCE,IAAKnC,EAELoC,KAAM,CACJC,eAAgB,KAAA,CACdC,GAAI,eACJC,KAAM,YACNC,MAAO,qBAETC,gBAAiB,KAAM"}
|
||||
8
packages/spark-tools/dist/spark.d.ts
vendored
Normal file
8
packages/spark-tools/dist/spark.d.ts
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/**
|
||||
* Spark Module - Main export for spark runtime
|
||||
*
|
||||
* Re-export spark runtime for '@github/spark/spark' imports
|
||||
*/
|
||||
export { default } from './lib/spark';
|
||||
export { sparkRuntime } from './lib/spark-runtime';
|
||||
//# sourceMappingURL=spark.d.ts.map
|
||||
1
packages/spark-tools/dist/spark.d.ts.map
vendored
Normal file
1
packages/spark-tools/dist/spark.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"spark.d.ts","sourceRoot":"","sources":["../src/spark.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,aAAa,CAAA;AACrC,OAAO,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAA"}
|
||||
2
packages/spark-tools/dist/spark.js
vendored
Normal file
2
packages/spark-tools/dist/spark.js
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import{s as e}from"./spark-runtime-wNXbhm34.js";"undefined"!=typeof window&&(window.spark=e);export{e as default,e as sparkRuntime};
|
||||
//# sourceMappingURL=spark.js.map
|
||||
1
packages/spark-tools/dist/spark.js.map
vendored
Normal file
1
packages/spark-tools/dist/spark.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"spark.js","sources":["../src/lib/spark.ts"],"sourcesContent":["/**\n * Spark Initialization Module\n * \n * This module initializes the Spark runtime and makes it available globally\n * via window.spark. It should be imported early in the application lifecycle.\n */\n\nimport { sparkRuntime } from './spark-runtime'\n\n// Declare global window.spark\ndeclare global {\n interface Window {\n spark: typeof sparkRuntime\n }\n}\n\n// Initialize window.spark\nif (typeof window !== 'undefined') {\n window.spark = sparkRuntime\n}\n\nexport default sparkRuntime\n"],"names":["window","spark","sparkRuntime"],"mappings":"gDAiBsB,oBAAXA,SACTA,OAAOC,MAAQC"}
|
||||
15
packages/spark-tools/dist/sparkVitePlugin.d.ts
vendored
Normal file
15
packages/spark-tools/dist/sparkVitePlugin.d.ts
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
/**
|
||||
* Spark Vite Plugin
|
||||
*
|
||||
* This plugin integrates Spark functionality into the Vite build process.
|
||||
* Currently provides a minimal implementation that can be extended with:
|
||||
* - Spark runtime injection
|
||||
* - Configuration validation
|
||||
* - Development server enhancements
|
||||
*/
|
||||
export default function sparkPlugin(): {
|
||||
name: string;
|
||||
configResolved(config: any): void;
|
||||
transformIndexHtml(html: string): string;
|
||||
};
|
||||
//# sourceMappingURL=sparkVitePlugin.d.ts.map
|
||||
1
packages/spark-tools/dist/sparkVitePlugin.d.ts.map
vendored
Normal file
1
packages/spark-tools/dist/sparkVitePlugin.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"sparkVitePlugin.d.ts","sourceRoot":"","sources":["../src/sparkVitePlugin.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAEH,MAAM,CAAC,OAAO,UAAU,WAAW;;2BAIR,GAAG;6BAKD,MAAM;EAMlC"}
|
||||
2
packages/spark-tools/dist/sparkVitePlugin.js
vendored
Normal file
2
packages/spark-tools/dist/sparkVitePlugin.js
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
function e(){return{name:"spark-vite-plugin",configResolved(e){},transformIndexHtml:e=>e}}export{e as default};
|
||||
//# sourceMappingURL=sparkVitePlugin.js.map
|
||||
1
packages/spark-tools/dist/sparkVitePlugin.js.map
vendored
Normal file
1
packages/spark-tools/dist/sparkVitePlugin.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"sparkVitePlugin.js","sources":["../src/sparkVitePlugin.ts"],"sourcesContent":["/**\n * Spark Vite Plugin\n * \n * This plugin integrates Spark functionality into the Vite build process.\n * Currently provides a minimal implementation that can be extended with:\n * - Spark runtime injection\n * - Configuration validation\n * - Development server enhancements\n */\n\nexport default function sparkPlugin() {\n return {\n name: 'spark-vite-plugin',\n \n configResolved(config: any) {\n // TODO: Add Spark-specific configuration handling if needed\n // This hook is called after the Vite config is resolved\n },\n \n transformIndexHtml(html: string) {\n // TODO: Add Spark runtime injection to HTML if needed\n // Currently returns HTML unchanged\n return html\n }\n }\n}\n"],"names":["sparkPlugin","name","configResolved","config","transformIndexHtml","html"],"mappings":"AAUc,SAAUA,IACtB,MAAO,CACLC,KAAM,oBAEN,cAAAC,CAAeC,GAGf,EAEAC,mBAAmBC,GAGVA,EAGb"}
|
||||
16
packages/spark-tools/dist/vitePhosphorIconProxyPlugin.d.ts
vendored
Normal file
16
packages/spark-tools/dist/vitePhosphorIconProxyPlugin.d.ts
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
/**
|
||||
* Vite Phosphor Icon Proxy Plugin
|
||||
*
|
||||
* This plugin provides a proxy for Phosphor icon imports.
|
||||
* Currently provides a pass-through implementation that allows
|
||||
* Vite to handle icon imports normally. Can be extended to:
|
||||
* - Optimize icon bundle sizes
|
||||
* - Implement lazy loading for icons
|
||||
* - Transform icon imports for better tree-shaking
|
||||
*/
|
||||
export default function createIconImportProxy(): {
|
||||
name: string;
|
||||
resolveId(id: string): null | undefined;
|
||||
transform(code: string, id: string): null;
|
||||
};
|
||||
//# sourceMappingURL=vitePhosphorIconProxyPlugin.d.ts.map
|
||||
1
packages/spark-tools/dist/vitePhosphorIconProxyPlugin.d.ts.map
vendored
Normal file
1
packages/spark-tools/dist/vitePhosphorIconProxyPlugin.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"vitePhosphorIconProxyPlugin.d.ts","sourceRoot":"","sources":["../src/vitePhosphorIconProxyPlugin.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,MAAM,CAAC,OAAO,UAAU,qBAAqB;;kBAI3B,MAAM;oBAQJ,MAAM,MAAM,MAAM;EAMrC"}
|
||||
2
packages/spark-tools/dist/vitePhosphorIconProxyPlugin.js
vendored
Normal file
2
packages/spark-tools/dist/vitePhosphorIconProxyPlugin.js
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
function r(){return{name:"vite-phosphor-icon-proxy",resolveId(r){if(r.includes("@phosphor-icons/react"))return null},transform:(r,n)=>null}}export{r as default};
|
||||
//# sourceMappingURL=vitePhosphorIconProxyPlugin.js.map
|
||||
1
packages/spark-tools/dist/vitePhosphorIconProxyPlugin.js.map
vendored
Normal file
1
packages/spark-tools/dist/vitePhosphorIconProxyPlugin.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"vitePhosphorIconProxyPlugin.js","sources":["../src/vitePhosphorIconProxyPlugin.ts"],"sourcesContent":["/**\n * Vite Phosphor Icon Proxy Plugin\n * \n * This plugin provides a proxy for Phosphor icon imports.\n * Currently provides a pass-through implementation that allows\n * Vite to handle icon imports normally. Can be extended to:\n * - Optimize icon bundle sizes\n * - Implement lazy loading for icons\n * - Transform icon imports for better tree-shaking\n */\n\nexport default function createIconImportProxy() {\n return {\n name: 'vite-phosphor-icon-proxy',\n \n resolveId(id: string) {\n // TODO: Add custom icon resolution if needed\n // Currently lets Vite handle all icon imports normally\n if (id.includes('@phosphor-icons/react')) {\n return null // Let Vite handle it normally\n }\n },\n \n transform(code: string, id: string) {\n // TODO: Add icon import transformations if needed\n // Currently returns null to let Vite handle transformations\n return null\n }\n }\n}\n"],"names":["createIconImportProxy","name","resolveId","id","includes","transform","code"],"mappings":"AAWc,SAAUA,IACtB,MAAO,CACLC,KAAM,2BAEN,SAAAC,CAAUC,GAGR,GAAIA,EAAGC,SAAS,yBACd,OAAO,IAEX,EAEAC,UAAS,CAACC,EAAcH,IAGf,KAGb"}
|
||||
33
packages/spark-tools/rollup.config.ts
Normal file
33
packages/spark-tools/rollup.config.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import typescript from '@rollup/plugin-typescript'
import resolve from '@rollup/plugin-node-resolve'
import commonjs from '@rollup/plugin-commonjs'
import terser from '@rollup/plugin-terser'
import del from 'rollup-plugin-delete'

// Build config for @github/spark workspace package: bundles each entry point
// to ES modules under dist/, with sourcemaps, minification and .d.ts output.
export default {
  // One output file (plus shared chunks) per entry point.
  input: {
    index: 'src/index.ts',
    spark: 'src/spark.ts',
    sparkVitePlugin: 'src/sparkVitePlugin.ts',
    vitePhosphorIconProxyPlugin: 'src/vitePhosphorIconProxyPlugin.ts',
  },
  output: {
    dir: 'dist',
    format: 'es', // ESM-only output
    sourcemap: true,
    preserveModules: false, // bundle shared code into chunks rather than mirroring src/ 1:1
  },
  // Host/peer dependencies — resolved by the consumer, never bundled.
  external: ['react', 'react-dom', 'vite'],
  // Plugin order matters: clean first, minify last.
  plugins: [
    del({ targets: 'dist/*' }), // wipe previous build output
    resolve(), // locate dependencies in node_modules
    commonjs(), // convert CommonJS deps to ESM
    typescript({
      tsconfig: './tsconfig.json',
      declaration: true, // emit .d.ts files
      declarationDir: 'dist',
      rootDir: 'src',
    }),
    terser(), // minify the bundles
  ],
}
|
||||
94
packages/spark-tools/src/hooks/index.ts
Normal file
94
packages/spark-tools/src/hooks/index.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import { useState, useEffect, useCallback } from 'react'
|
||||
|
||||
/**
|
||||
* useKV Hook - Persistent key-value storage with localStorage and window.spark.kv integration
|
||||
*
|
||||
* This hook provides persistent state management that syncs with localStorage
|
||||
* and integrates with the Spark KV storage system if available.
|
||||
*
|
||||
* @param key - Storage key
|
||||
* @param defaultValue - Default value if key doesn't exist
|
||||
* @returns Tuple of [value, setValue, deleteValue]
|
||||
*/
|
||||
export function useKV<T>(
|
||||
key: string,
|
||||
defaultValue: T
|
||||
): [T, (value: T | ((prev: T) => T)) => void, () => void] {
|
||||
// Initialize state from localStorage or default value
|
||||
const [value, setValueInternal] = useState<T>(() => {
|
||||
try {
|
||||
// Try to get from window.spark.kv first
|
||||
if (typeof window !== 'undefined' && window.spark?.kv) {
|
||||
const sparkValue = window.spark.kv.get(key)
|
||||
if (sparkValue !== undefined) {
|
||||
return sparkValue as T
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to localStorage
|
||||
const item = localStorage.getItem(key)
|
||||
return item ? JSON.parse(item) : defaultValue
|
||||
} catch (error) {
|
||||
console.error('Error reading from storage:', error)
|
||||
return defaultValue
|
||||
}
|
||||
})
|
||||
|
||||
// Set value and sync to storage
|
||||
const setValue = useCallback(
|
||||
(newValue: T | ((prev: T) => T)) => {
|
||||
try {
|
||||
setValueInternal((prevValue) => {
|
||||
const valueToStore =
|
||||
typeof newValue === 'function'
|
||||
? (newValue as (prev: T) => T)(prevValue)
|
||||
: newValue
|
||||
|
||||
// Store in localStorage
|
||||
localStorage.setItem(key, JSON.stringify(valueToStore))
|
||||
|
||||
// Store in window.spark.kv if available
|
||||
if (typeof window !== 'undefined' && window.spark?.kv) {
|
||||
window.spark.kv.set(key, valueToStore)
|
||||
}
|
||||
|
||||
return valueToStore
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Error writing to storage:', error)
|
||||
}
|
||||
},
|
||||
[key]
|
||||
)
|
||||
|
||||
// Delete value from storage
|
||||
const deleteValue = useCallback(() => {
|
||||
try {
|
||||
localStorage.removeItem(key)
|
||||
if (typeof window !== 'undefined' && window.spark?.kv) {
|
||||
window.spark.kv.delete(key)
|
||||
}
|
||||
setValueInternal(defaultValue)
|
||||
} catch (error) {
|
||||
console.error('Error deleting from storage:', error)
|
||||
}
|
||||
}, [key, defaultValue])
|
||||
|
||||
// Sync with localStorage changes from other tabs
|
||||
useEffect(() => {
|
||||
const handleStorageChange = (e: StorageEvent) => {
|
||||
if (e.key === key && e.newValue !== null) {
|
||||
try {
|
||||
setValueInternal(JSON.parse(e.newValue))
|
||||
} catch (error) {
|
||||
console.error('Error parsing storage event:', error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
window.addEventListener('storage', handleStorageChange)
|
||||
return () => window.removeEventListener('storage', handleStorageChange)
|
||||
}, [key])
|
||||
|
||||
return [value, setValue, deleteValue]
|
||||
}
|
||||
8
packages/spark-tools/src/index.ts
Normal file
8
packages/spark-tools/src/index.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
/**
 * @github/spark - Main Hooks Entry Point
 *
 * This is the entry point for the hooks exports from the Spark package.
 */

// Persistent key-value React hook.
export { useKV } from './hooks/index'
// Mock runtime services (kv / llm / user), re-exported here for convenience.
export { sparkRuntime } from './lib/spark-runtime'
|
||||
93
packages/spark-tools/src/lib/spark-runtime.ts
Normal file
93
packages/spark-tools/src/lib/spark-runtime.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
/**
|
||||
* Spark Runtime - Core runtime services for Spark applications
|
||||
*
|
||||
* This module provides mock implementations of Spark services including:
|
||||
* - KV storage (key-value store)
|
||||
* - LLM service (language model integration)
|
||||
* - User authentication
|
||||
*/
|
||||
|
||||
// Type definitions for LLM responses
|
||||
interface LLMChatResponse {
|
||||
role: string
|
||||
content: string
|
||||
}
|
||||
|
||||
// Mock KV Storage
|
||||
const kvStorage = new Map<string, any>()
|
||||
|
||||
// Create llm function with additional properties
|
||||
const llmFunction = async (prompt: string, model?: string, jsonMode?: boolean): Promise<string> => {
|
||||
console.log('Mock LLM called with prompt:', prompt, 'model:', model, 'jsonMode:', jsonMode)
|
||||
return 'This is a mock response from the Spark LLM service.'
|
||||
}
|
||||
|
||||
llmFunction.chat = async (messages: any[]): Promise<LLMChatResponse> => {
|
||||
console.log('Mock LLM chat called with messages:', messages)
|
||||
return {
|
||||
role: 'assistant',
|
||||
content: 'This is a mock response from the Spark LLM service.'
|
||||
}
|
||||
}
|
||||
|
||||
llmFunction.complete = async (prompt: string): Promise<string> => {
|
||||
console.log('Mock LLM complete called with prompt:', prompt)
|
||||
return 'This is a mock completion from the Spark LLM service.'
|
||||
}
|
||||
|
||||
export const sparkRuntime = {
|
||||
kv: {
|
||||
get: <T = any>(key: string): T | undefined => {
|
||||
try {
|
||||
const value = kvStorage.get(key)
|
||||
if (value !== undefined) {
|
||||
return value as T
|
||||
}
|
||||
const stored = localStorage.getItem(key)
|
||||
return stored ? JSON.parse(stored) : undefined
|
||||
} catch (error) {
|
||||
console.error('Error getting KV value:', error)
|
||||
return undefined
|
||||
}
|
||||
},
|
||||
set: (key: string, value: any) => {
|
||||
try {
|
||||
kvStorage.set(key, value)
|
||||
localStorage.setItem(key, JSON.stringify(value))
|
||||
} catch (error) {
|
||||
console.error('Error setting KV value:', error)
|
||||
}
|
||||
},
|
||||
delete: (key: string) => {
|
||||
try {
|
||||
kvStorage.delete(key)
|
||||
localStorage.removeItem(key)
|
||||
} catch (error) {
|
||||
console.error('Error deleting KV value:', error)
|
||||
}
|
||||
},
|
||||
clear: () => {
|
||||
try {
|
||||
// Get keys before clearing
|
||||
const keysToRemove = Array.from(kvStorage.keys())
|
||||
kvStorage.clear()
|
||||
// Clear corresponding keys from localStorage
|
||||
keysToRemove.forEach(key => localStorage.removeItem(key))
|
||||
} catch (error) {
|
||||
console.error('Error clearing KV storage:', error)
|
||||
}
|
||||
},
|
||||
keys: () => Array.from(kvStorage.keys())
|
||||
},
|
||||
|
||||
llm: llmFunction,
|
||||
|
||||
user: {
|
||||
getCurrentUser: () => ({
|
||||
id: 'mock-user-id',
|
||||
name: 'Mock User',
|
||||
email: 'mock@example.com'
|
||||
}),
|
||||
isAuthenticated: () => true
|
||||
}
|
||||
}
|
||||
22
packages/spark-tools/src/lib/spark.ts
Normal file
22
packages/spark-tools/src/lib/spark.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
/**
 * Spark Initialization Module
 *
 * Installs the Spark runtime on window.spark so code written against the
 * hosted Spark environment keeps working. Import this module early in the
 * application lifecycle, before anything reads window.spark.
 */

import { sparkRuntime } from './spark-runtime'

// Augment the global Window type so window.spark is known to TypeScript.
declare global {
  interface Window {
    spark: typeof sparkRuntime
  }
}

// Guard for non-browser contexts (SSR, tests) where window is undefined.
// Importing this module in a browser has the side effect of assigning
// window.spark.
if (typeof window !== 'undefined') {
  window.spark = sparkRuntime
}

export default sparkRuntime
|
||||
8
packages/spark-tools/src/spark.ts
Normal file
8
packages/spark-tools/src/spark.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
/**
 * Spark Module - Main export for spark runtime
 *
 * Re-exports the spark runtime so that '@github/spark/spark' imports resolve.
 * The default export comes from './lib/spark', whose import also installs
 * window.spark as a side effect; sparkRuntime is additionally re-exported by
 * name from './lib/spark-runtime'.
 */

export { default } from './lib/spark'
export { sparkRuntime } from './lib/spark-runtime'
|
||||
26
packages/spark-tools/src/sparkVitePlugin.ts
Normal file
26
packages/spark-tools/src/sparkVitePlugin.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
/**
|
||||
* Spark Vite Plugin
|
||||
*
|
||||
* This plugin integrates Spark functionality into the Vite build process.
|
||||
* Currently provides a minimal implementation that can be extended with:
|
||||
* - Spark runtime injection
|
||||
* - Configuration validation
|
||||
* - Development server enhancements
|
||||
*/
|
||||
|
||||
export default function sparkPlugin() {
|
||||
return {
|
||||
name: 'spark-vite-plugin',
|
||||
|
||||
configResolved(config: any) {
|
||||
// TODO: Add Spark-specific configuration handling if needed
|
||||
// This hook is called after the Vite config is resolved
|
||||
},
|
||||
|
||||
transformIndexHtml(html: string) {
|
||||
// TODO: Add Spark runtime injection to HTML if needed
|
||||
// Currently returns HTML unchanged
|
||||
return html
|
||||
}
|
||||
}
|
||||
}
|
||||
30
packages/spark-tools/src/vitePhosphorIconProxyPlugin.ts
Normal file
30
packages/spark-tools/src/vitePhosphorIconProxyPlugin.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
/**
|
||||
* Vite Phosphor Icon Proxy Plugin
|
||||
*
|
||||
* This plugin provides a proxy for Phosphor icon imports.
|
||||
* Currently provides a pass-through implementation that allows
|
||||
* Vite to handle icon imports normally. Can be extended to:
|
||||
* - Optimize icon bundle sizes
|
||||
* - Implement lazy loading for icons
|
||||
* - Transform icon imports for better tree-shaking
|
||||
*/
|
||||
|
||||
export default function createIconImportProxy() {
|
||||
return {
|
||||
name: 'vite-phosphor-icon-proxy',
|
||||
|
||||
resolveId(id: string) {
|
||||
// TODO: Add custom icon resolution if needed
|
||||
// Currently lets Vite handle all icon imports normally
|
||||
if (id.includes('@phosphor-icons/react')) {
|
||||
return null // Let Vite handle it normally
|
||||
}
|
||||
},
|
||||
|
||||
transform(code: string, id: string) {
|
||||
// TODO: Add icon import transformations if needed
|
||||
// Currently returns null to let Vite handle transformations
|
||||
return null
|
||||
}
|
||||
}
|
||||
}
|
||||
21
packages/spark-tools/tsconfig.json
Normal file
21
packages/spark-tools/tsconfig.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2020",
|
||||
"module": "ESNext",
|
||||
"lib": ["ES2020", "DOM", "DOM.Iterable"],
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"moduleResolution": "bundler",
|
||||
"resolveJsonModule": true,
|
||||
"jsx": "react-jsx",
|
||||
"allowSyntheticDefaultImports": true
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
Reference in New Issue
Block a user