no sha route fix
Aslemammad committed Mar 26, 2024
1 parent 4ca5829 commit 3115fea
Showing 8 changed files with 101 additions and 53 deletions.
28 changes: 8 additions & 20 deletions e2e/publish.test.mts
@@ -65,7 +65,8 @@ await wp({ port: PORT });
GITHUB_REPOSITORY: workflowJobQueuedFixture.payload.repository.full_name,
GITHUB_RUN_ID: workflowJobQueuedFixture.payload.workflow_job.run_id,
GITHUB_RUN_ATTEMPT: workflowJobQueuedFixture.payload.workflow_job.run_attempt,
GITHUB_ACTOR_ID:workflowJobQueuedFixture.payload.sender.id ,
GITHUB_ACTOR_ID:workflowJobQueuedFixture.payload.sender.id,
GITHUB_SHA:workflowJobQueuedFixture.payload.workflow_job.head_sha,
}).map(([k, v]) => `${k}=${v}`).join(' ')
await ezSpawn.async(`${env} pnpm --filter=playground run publish`, [], {
stdio: 'inherit',
@@ -82,17 +83,15 @@ await wp({ port: PORT });
})

const playgroundShaBlob =await playgroundShaData.blob()
console.log(playgroundShaBlob)
assert.ok(!!playgroundShaBlob.size, "playground size should not be zero")
assert.equal(playgroundShaData.status, 200, "playground response should be 200")

// const playgroundWithoutShaUrl = new URL('/stackblitz-labs/stackblitz-ci/main/playground', serverUrl)
// const playgroundWithoutShaData = await fetch(playgroundWithoutShaUrl, {
// method: 'GET',
// })
// const playgroundWithoutShaBlob = await playgroundWithoutShaData.blob()
// console.log('sha url and non-sha url', playgroundShaBlob.arrayBuffer, playgroundWithoutShaBlob.arrayBuffer)
// assert.deepEqual(await playgroundShaBlob.arrayBuffer(), await playgroundWithoutShaBlob.arrayBuffer(), "sha urls and non-sha urls should not give different results")
const playgroundWithoutShaUrl = new URL('/stackblitz-labs/stackblitz-ci/main/playground', serverUrl)
const playgroundWithoutShaData = await fetch(playgroundWithoutShaUrl, {
method: 'GET',
})
const playgroundWithoutShaBlob = await playgroundWithoutShaData.blob()
assert.deepEqual(await playgroundShaBlob.arrayBuffer(), await playgroundWithoutShaBlob.arrayBuffer(), "sha urls and non-sha urls should not give different results")
}
{
playgroundShaUrl.searchParams.set('id', Date.now().toString())
@@ -104,14 +103,3 @@ await wp({ port: PORT });
}

}





// // c.abort();
// // await p

// process.on('beforeExit', () => {
// c.abort()
// })
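
For orientation, the two URL shapes this test now compares can be written out side by side; the values below are placeholders, not the fixture's. The sha-less form is the one this commit teaches the backend to resolve:

const org = "some-org", repo = "some-repo", ref = "main", sha = "4ca5829", pkg = "playground";
const withSha = `/${org}/${repo}/${ref}/${sha}/${pkg}`; // served straight from the packages bucket
const withoutSha = `/${org}/${repo}/${ref}/${pkg}`;     // resolved via the new cursor lookup and redirect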
@@ -7,21 +7,20 @@ type Params = Omit<WorkflowData, "sha"> & {

export default eventHandler(async (event) => {
const params = getRouterParams(event) as Params;
const packagesBucket = usePackagesBucket();

const { package: packageName, ...hashPrefixMetadata } = params;
const metadataHash = sha256(objectHash(hashPrefixMetadata));
const keys = await packagesBucket.getKeys(metadataHash)
console.log(keys)
// const packageKey = `${metadataHash}:${sha}:${packageName.split('.tgz')[0]}`;
// if (!(await packagesBucket.hasItem(packageKey))) {
// throw createError({
// status: 404,
// });
// }
// const buffer = await packagesBucket.getItemRaw<ArrayBuffer>(packageKey);

// setResponseHeader(event, "content-type", "application/tar+gzip");
// // add caching
// return new Response(buffer);
const cursorBucket = useCursorBucket();
if (!(await cursorBucket.hasItem(metadataHash))) {
throw createError({
status: 404,
});
}
const currentCursor = (await cursorBucket.getItem(metadataHash))!;

sendRedirect(
event,
`/${params.orgOrAuthor}/${params.repo}/${params.ref}/${currentCursor.sha}/${params.package}`
);
});
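
From a consumer's point of view, the new sha-less route behaves like an alias: it looks up the latest published sha for the branch under the same metadata hash and redirects to the sha-qualified package URL. A hedged sketch of a client call (serverUrl and the path are placeholders; fetch follows the redirect by default):

const serverUrl = "http://localhost:3000"; // placeholder
const res = await fetch(new URL("/some-org/some-repo/main/some-package", serverUrl));
// After following the redirect, res.url points at the sha-qualified route.
console.log(res.status, res.url);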
51 changes: 40 additions & 11 deletions packages/backend/server/routes/publish.post.ts
@@ -1,5 +1,5 @@
import { objectHash, sha256 } from "ohash";
import {generateCommitPublishMessage} from '../utils/markdown'
import { generateCommitPublishMessage } from "../utils/markdown";

export default eventHandler(async (event) => {
const contentLength = Number(getHeader(event, "content-length"));
@@ -8,39 +8,68 @@ export default eventHandler(async (event) => {
// Payload too large
return new Response("Max size limit is 5mb", { status: 413 });
}
const key = getRequestHeader(event, "sb-key");
const {
"sb-package-name": packageName,
"sb-commit-timestamp": commitTimestampStr,
"sb-key": key
} = getHeaders(event);
if (!key || !packageName || !commitTimestampStr) {
throw createError({
statusCode: 400,
message: "sb-package-name, sb-commit-timestamp, sb-key headers are required"
})
}

const workflowsBucket = useWorkflowsBucket();
const packagesBucket = usePackagesBucket();

const cursorBucket = useCursorBucket();
if (!(await workflowsBucket.hasItem(key))) {
return new Response("", { status: 401 });
throw createError({
statusCode: 401,
message: "Try publishing from a github workflow"
})
}

const binary = await readRawBody(event, false);
const { "sb-package-name": packageName, "sb-package-version": _ } =
getHeaders(event);

if (!packageName || !commitTimestampStr) {
throw createError({
statusCode: 400,
message: "sb-key header is missing"
})

}
const commitTimestamp = Number(commitTimestampStr);

const workflowData = await workflowsBucket.getItem(key);
const workflowData = (await workflowsBucket.getItem(key))!;
const { sha, ...hashPrefixMetadata } = workflowData;
const metadataHash = sha256(objectHash(hashPrefixMetadata));
const packageKey = `${metadataHash}:${sha}:${packageName}`;

const currentCursor = await cursorBucket.getItem(metadataHash);

await packagesBucket.setItemRaw(packageKey, binary);
if (!currentCursor || currentCursor.timestamp < commitTimestamp) {
await cursorBucket.setItem(metadataHash, {
sha,
timestamp: commitTimestamp,
});
}

await workflowsBucket.removeItem(key);

const app = useOctokitApp(event);
const origin = getRequestURL(event).origin
const origin = getRequestURL(event).origin;

app.octokit.request("POST /repos/{owner}/{repo}/check-runs", {
name: "Stackblitz CR (Publish)",
owner: workflowData.orgOrAuthor,
repo: workflowData.repo,
head_sha: sha,
output: {
title: 'Stackblitz CR',
summary: 'Published successfully.',
text: generateCommitPublishMessage(origin, packageName, workflowData)
title: "Stackblitz CR",
summary: "Published successfully.",
text: generateCommitPublishMessage(origin, packageName, workflowData),
},
conclusion: "success",
});
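
A standalone restatement of the cursor-advance rule introduced above may help: the stored cursor only moves forward when the incoming commit is newer, and ties keep the stored entry. A minimal sketch (Cursor mirrors the type added in types.ts; nextCursor is an illustrative helper, not part of the commit):

type Cursor = { timestamp: number; sha: string };

// Keep whichever cursor points at the newer commit; equal timestamps keep the stored one.
function nextCursor(current: Cursor | null, incoming: Cursor): Cursor {
  return !current || current.timestamp < incoming.timestamp ? incoming : current;
}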
5 changes: 5 additions & 0 deletions packages/backend/server/types.ts
@@ -6,3 +6,8 @@ export type WorkflowData = {
sha: string,
ref: string // branch
}

export type Cursor = {
timestamp: number
sha: string
}
22 changes: 15 additions & 7 deletions packages/backend/server/utils/bucket.ts
@@ -1,6 +1,6 @@
import { R2Bucket } from "@cloudflare/workers-types";
import {prefixStorage} from 'unstorage'
import { WorkflowData } from "../types";
import type { R2Bucket } from "@cloudflare/workers-types";
import { prefixStorage } from "unstorage";
import { WorkflowData, Cursor } from "../types";

type Binary = Parameters<R2Bucket["put"]>[1];

@@ -9,11 +9,19 @@ export function useBucket() {
}

export function useWorkflowsBucket() {
const storage = useBucket()
return prefixStorage<WorkflowData>(storage, 'workflows')
const storage = useBucket();
return prefixStorage<WorkflowData>(
storage,
"workflows"
);
}

export function usePackagesBucket() {
const storage = useBucket()
return prefixStorage<ArrayBuffer>(storage, 'packages')
const storage = useBucket();
return prefixStorage<ArrayBuffer>(storage, "packages");
}

export function useCursorBucket() {
const storage = useBucket();
return prefixStorage<Cursor>(storage, "cursor");
}
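
For context, prefixStorage only namespaces keys, so entries written through useCursorBucket land under a cursor: prefix in the same underlying bucket. A small usage sketch (the hash and values are placeholders):

const cursors = useCursorBucket();
await cursors.setItem("some-metadata-hash", { sha: "4ca5829", timestamp: 1711411200 });
const latest = await cursors.getItem("some-metadata-hash"); // Cursor | null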
1 change: 1 addition & 0 deletions packages/backend/tsconfig.json
@@ -1,6 +1,7 @@
// https://nitro.unjs.io/guide/typescript
{
"compilerOptions": {
"strict": true,
"skipLibCheck": true
},
"extends": "./.nitro/types/tsconfig.json"
21 changes: 19 additions & 2 deletions packages/cli/index.ts
@@ -1,5 +1,5 @@
import { defineCommand, runMain } from "citty";
import assert from 'node:assert'
import assert from "node:assert";
import path from "path";
import ezSpawn from "@jsdevtools/ez-spawn";
// import { createRequire } from "module";
@@ -61,6 +61,18 @@ const main = defineCommand({
meta: {},
run: async () => {
await ezSpawn.async("npm pack", { stdio: "inherit" });
const p = await ezSpawn.async(
`git show -s --format=\%ct ${GITHUB_SHA}`,
{ stdio: "overlapped" }
);
console.log(p.stdout, p.stderr)
const commitTimestamp = Number(p.stdout);
console.log("commit timestamp", commitTimestamp);
assert.ok(
!Number.isNaN(commitTimestamp),
"failed at getting commit timestamp"
);

const file = await fs.readFile(`${name}-${version}.tgz`);

const data = await fetch(publishUrl, {
@@ -69,10 +81,15 @@ const main = defineCommand({
"sb-key": key,
"sb-package-name": name,
"sb-package-version": version,
"sb-commit-timestamp": commitTimestamp,
},
body: file,
});
assert.equal(data.status, 200, `publishing failed: ${await data.text()}`)
assert.equal(
data.status,
200,
`publishing failed: ${await data.text()}`
);

const url = new URL(
`/${GITHUB_REPOSITORY}/${GITHUB_REF_NAME}/${GITHUB_SHA}/${name}`,
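
The timestamp the CLI now sends comes from git itself: "git show -s --format=%ct <sha>" prints the committer date as Unix seconds, which is what the backend compares when deciding whether to advance the cursor. A minimal standalone sketch (GITHUB_SHA is assumed to be set, as on an Actions runner; the HEAD fallback is for illustration only):

import ezSpawn from "@jsdevtools/ez-spawn";

// Resolve the committer timestamp (Unix seconds) of the commit being published.
const sha = process.env.GITHUB_SHA ?? "HEAD";
const p = await ezSpawn.async(`git show -s --format=%ct ${sha}`, { stdio: "overlapped" });
const commitTimestamp = Number(p.stdout.trim());
console.log("commit timestamp", commitTimestamp); // e.g. 1711411200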
1 change: 1 addition & 0 deletions packages/cli/tsconfig.json
@@ -1,6 +1,7 @@
// https://nitro.unjs.io/guide/typescript
{
"compilerOptions": {
"strict": true,
"skipLibCheck": true
},
"extends": "../backend/.nitro/types/tsconfig.json"
