[CLI] Invalidate local cache in cachedDownload #2413

Draft
wants to merge 2 commits into trunk
32 changes: 30 additions & 2 deletions packages/nx-extensions/src/executors/package-json/executor.ts
@@ -44,6 +44,17 @@ export default async function* packageJsonExecutor(
}

const monorepoDependencies = getMonorepoDependencies(context);

// Read optional dependencies from the original package.json
let originalOptionalDependencies: Record<string, string> | undefined;
const originalPackageJsonPath = `${context.root}/package.json`;
if (fs.existsSync(originalPackageJsonPath)) {
const originalPackageJson = JSON.parse(
fs.readFileSync(originalPackageJsonPath).toString()
);
originalOptionalDependencies = originalPackageJson.optionalDependencies;
}

for await (const event of startBuild(options, context)) {
if (!event.success) {
throw 'There was an error with the build. See above.';
@@ -54,7 +65,8 @@ export default async function* packageJsonExecutor(
options,
context,
helperDependencies,
monorepoDependencies
monorepoDependencies,
originalOptionalDependencies
);
if (built === false) {
return {
@@ -87,7 +99,8 @@ async function buildPackageJson(
options: PackageJsonExecutorSchema,
context: ExecutorContext,
helperDependencies: ProjectGraphDependency[],
monorepoDependencies: MonorepoDependency[]
monorepoDependencies: MonorepoDependency[],
originalOptionalDependencies?: Record<string, string>
) {
const packageJson = createPackageJson(
context.projectName,
@@ -118,6 +131,21 @@ async function buildPackageJson(
packageJson.dependencies[dep.name] = dep.version;
}

// Preserve optionalDependencies from the original package.json
if (originalOptionalDependencies) {
packageJson.optionalDependencies = originalOptionalDependencies;

// Remove optional dependencies from regular dependencies to avoid duplication
for (const optionalDep of Object.keys(originalOptionalDependencies)) {
if (
packageJson.dependencies &&
packageJson.dependencies[optionalDep]
) {
delete packageJson.dependencies[optionalDep];
}
}
}

// make main relative to context root
if (main.startsWith(context.root)) {
main = main.substring(context.root.length).replace(/^\//, '');
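
The net effect of the executor change above is that optionalDependencies declared in the repository's root package.json survive into the generated package.json, and any package listed there is dropped from the regular dependencies so it is not declared twice. A standalone sketch of that merge step, using made-up package names and versions (mergeOptionalDependencies is a hypothetical helper, not part of the diff):

type PackageJsonLike = {
    dependencies?: Record<string, string>;
    optionalDependencies?: Record<string, string>;
};

// Standalone illustration of the merge performed in buildPackageJson above.
function mergeOptionalDependencies(
    packageJson: PackageJsonLike,
    originalOptionalDependencies?: Record<string, string>
): PackageJsonLike {
    if (!originalOptionalDependencies) {
        return packageJson;
    }
    packageJson.optionalDependencies = originalOptionalDependencies;
    // Drop optional packages from regular dependencies to avoid duplication.
    for (const dep of Object.keys(originalOptionalDependencies)) {
        if (packageJson.dependencies?.[dep]) {
            delete packageJson.dependencies[dep];
        }
    }
    return packageJson;
}

// Example: fsevents moves to optionalDependencies, express stays in dependencies.
console.log(
    mergeOptionalDependencies(
        { dependencies: { fsevents: '^2.3.3', express: '^4.19.2' } },
        { fsevents: '^2.3.3' }
    )
);
// => { dependencies: { express: '^4.19.2' }, optionalDependencies: { fsevents: '^2.3.3' } }
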
99 changes: 93 additions & 6 deletions packages/playground/cli/src/blueprints-v1/download.ts
@@ -5,6 +5,12 @@ import path, { basename } from 'path';

export const CACHE_FOLDER = path.join(os.homedir(), '.wordpress-playground');

interface CacheMetadata {
etag?: string;
lastModified?: string;
downloadedAt: number;
}

export async function fetchSqliteIntegration(
monitor: EmscriptenDownloadMonitor
) {
@@ -16,30 +22,95 @@ export async function fetchSqliteIntegration(
return sqliteZip;
}

// @TODO: Support HTTP cache, invalidate the local file if the remote file has
// changed
export async function cachedDownload(
remoteUrl: string,
cacheKey: string,
monitor: EmscriptenDownloadMonitor
) {
const artifactPath = path.join(CACHE_FOLDER, cacheKey);
if (!fs.existsSync(artifactPath)) {
fs.ensureDirSync(CACHE_FOLDER);
await downloadTo(remoteUrl, artifactPath, monitor);
const metadataPath = path.join(CACHE_FOLDER, `${cacheKey}.metadata.json`);

// Check if file exists and if it needs to be re-downloaded
if (fs.existsSync(artifactPath)) {
try {
// Load existing metadata for conditional request headers
let metadata: CacheMetadata | null = null;
if (fs.existsSync(metadataPath)) {
try {
metadata = JSON.parse(
fs.readFileSync(metadataPath, 'utf-8')
);
} catch {
// If metadata is corrupted, re-download
metadata = null;
}
}

// Build conditional request headers (If-None-Match / If-Modified-Since)
// from the cached metadata so the server can answer 304 Not Modified
// when the remote file is unchanged
const headers: HeadersInit = {};
if (metadata?.etag) {
headers['If-None-Match'] = metadata.etag;
}
if (metadata?.lastModified) {
headers['If-Modified-Since'] = metadata.lastModified;
}

// Make a conditional request
const response = await fetch(remoteUrl, {
method: 'HEAD',
headers,
cache: 'no-cache',
});

// If we get 304 Not Modified, use the cached version
if (response.status === 304) {
return readAsFile(artifactPath);
}

// If we have headers but no 304, the file has changed
if (metadata && (metadata.etag || metadata.lastModified)) {
// File has changed, will re-download below
} else {
// No cache headers available, fall back to time-based check
// Re-download if file is older than 24 hours
const oneDayAgo = Date.now() - 24 * 60 * 60 * 1000;
if (metadata && metadata.downloadedAt >= oneDayAgo) {
return readAsFile(artifactPath);
}
}
} catch (error) {
// If we can't check the remote file (network issue, etc.),
// use the cached version if it exists
console.warn(
'Unable to check remote file, using cached version:',
error
);
return readAsFile(artifactPath);
}
}

fs.ensureDirSync(CACHE_FOLDER);
await downloadTo(remoteUrl, artifactPath, metadataPath, monitor);
return readAsFile(artifactPath);
}
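
The cache check in cachedDownload is a standard HTTP conditional request: the stored ETag and Last-Modified values are sent back as If-None-Match and If-Modified-Since, and a 304 Not Modified response means the cached file is still current. A minimal standalone sketch of that check (isRemoteUnchanged is a hypothetical helper, not part of the diff):

async function isRemoteUnchanged(
    remoteUrl: string,
    etag?: string,
    lastModified?: string
): Promise<boolean> {
    const headers: Record<string, string> = {};
    if (etag) {
        headers['If-None-Match'] = etag;
    }
    if (lastModified) {
        headers['If-Modified-Since'] = lastModified;
    }
    const response = await fetch(remoteUrl, {
        method: 'HEAD',
        headers,
        cache: 'no-cache',
    });
    // 304 means the server confirmed the cached copy is still valid.
    return response.status === 304;
}
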

async function downloadTo(
remoteUrl: string,
localPath: string,
metadataPath: string,
monitor: EmscriptenDownloadMonitor
) {
const response = await monitor.monitorFetch(fetch(remoteUrl));

// Extract cache headers for metadata
const etag = response.headers.get('etag');
const lastModified = response.headers.get('last-modified');

const reader = response.body!.getReader();
const tmpPath = `${localPath}.partial`;
const writer = fs.createWriteStream(tmpPath);

while (true) {
const { done, value } = await reader.read();
if (value) {
@@ -49,11 +120,24 @@ async function downloadTo(
break;
}
}

writer.close();
if (!writer.closed) {
await new Promise((resolve, reject) => {
writer.on('finish', () => {
fs.renameSync(tmpPath, localPath);

// Save metadata
const metadata: CacheMetadata = {
downloadedAt: Date.now(),
};
if (etag) metadata.etag = etag;
if (lastModified) metadata.lastModified = lastModified;

fs.writeFileSync(
metadataPath,
JSON.stringify(metadata, null, 2)
);
resolve(null);
});
writer.on('error', (err: any) => {
@@ -65,5 +149,8 @@ async function downloadTo(
}
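
downloadTo streams the response body into a temporary .partial file and only renames it into place once the write stream reports 'finish', so an interrupted download never leaves a truncated artifact at the final path; the response's ETag and Last-Modified headers are then persisted in a sidecar JSON file next to the artifact. For illustration, a sidecar for a cached artifact would serialize values like these (all values hypothetical):

// Hypothetical contents of a <cacheKey>.metadata.json file written by downloadTo.
const exampleMetadata: CacheMetadata = {
    etag: '"33a64df551425fcc55e4d42a148795d9f25f89d4"',
    lastModified: 'Wed, 21 Oct 2015 07:28:00 GMT',
    downloadedAt: 1730000000000,
};
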

export function readAsFile(path: string, fileName?: string): File {
return new File([fs.readFileSync(path)], fileName ?? basename(path));
const buffer = fs.readFileSync(path);
// Convert the Buffer to a Uint8Array so it satisfies the BlobPart type
// expected by the File constructor (and silences the linter error)
const uint8Array = new Uint8Array(buffer);
return new File([uint8Array], fileName ?? basename(path));
}
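
Finally, a hypothetical call site for the updated cachedDownload, assuming it sits alongside the function in download.ts and that EmscriptenDownloadMonitor is the monitor type imported from @php-wasm/progress, as elsewhere in Playground:

// Hypothetical usage sketch, not part of the diff. On the first run this
// downloads the zip and writes wordpress-latest.zip.metadata.json; on later
// runs it sends a conditional HEAD request and reuses the cached file on a
// 304 response, or falls back to the 24-hour age check when the server sent
// no cache headers.
async function fetchWordPressZip(monitor: EmscriptenDownloadMonitor) {
    return cachedDownload(
        'https://wordpress.org/latest.zip',
        'wordpress-latest.zip',
        monitor
    );
}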