chore: generate sdk into packages/sdk

adamdotdevin 2025-07-22 11:50:51 -05:00
parent 500cea5ce7
commit 10c8b49590
No known key found for this signature in database
GPG key ID: 9CB48779AF150E75
110 changed files with 12576 additions and 201 deletions


@@ -0,0 +1,24 @@
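// Reads the JSON report in .attw.json (presumably produced by `attw --format json`),
// drops the problem kinds that are intentional for this package, and fails the build
// if any other type-resolution problems remain.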
const fs = require('fs');
const problems = Object.values(JSON.parse(fs.readFileSync('.attw.json', 'utf-8')).problems)
  .flat()
  .filter(
    (problem) =>
      !(
        // This is intentional, if the user specifies .mjs they get ESM.
        (problem.kind === 'CJSResolvesToESM' && problem.entrypoint.endsWith('.mjs')) ||
        // This is intentional for backwards compat reasons.
        (problem.kind === 'MissingExportEquals' && problem.implementationFileName.endsWith('/index.js')) ||
        // this is intentional, we deliberately attempt to import types that may not exist from parent node_modules
        // folders to better support various runtimes without triggering automatic type acquisition.
        (problem.kind === 'InternalResolutionError' && problem.moduleSpecifier.includes('node_modules'))
      ),
  );
fs.unlinkSync('.attw.json');
if (problems.length) {
  process.stdout.write('The types are wrong!\n' + JSON.stringify(problems, null, 2) + '\n');
  process.exitCode = 1;
} else {
  process.stdout.write('Types ok!\n');
}


@@ -0,0 +1,9 @@
#!/usr/bin/env bash
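# Succeeds (exit 0) only when this script appears to be running from a package
# manager's git checkout of the repo (inside node_modules or a temporary clone
# directory); fails otherwise.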
# Check if you happen to call prepare for a repository that's already in node_modules.
[ "$(basename "$(dirname "$PWD")")" = 'node_modules' ] ||
# The name of the containing directory that `npm` uses, which looks like
# $HOME/.npm/_cacache/tmp/git-cloneXXXXXX
[ "$(basename "$(dirname "$PWD")")" = 'tmp' ] ||
# The name of the containing directory that `yarn` uses, which looks like
# $(yarn cache dir)/.tmp/XXXXX
[ "$(basename "$(dirname "$PWD")")" = '.tmp' ]


@@ -0,0 +1,20 @@
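// Copies the version field from package.json into the VERSION constant in src/version.ts.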
const fs = require('fs');
const path = require('path');
const main = () => {
  const pkg = require('../../package.json');
  const version = pkg['version'];
  if (!version) throw 'The version property is not set in the package.json file';
  if (typeof version !== 'string') {
    throw `Unexpected type for the package.json version field; got ${typeof version}, expected string`;
  }
  const versionFile = path.resolve(__dirname, '..', '..', 'src', 'version.ts');
  const contents = fs.readFileSync(versionFile, 'utf8');
  const output = contents.replace(/(export const VERSION = ')(.*)(')/g, `$1${version}$3`);
  fs.writeFileSync(versionFile, output);
};
if (require.main === module) {
  main();
}


@@ -0,0 +1,17 @@
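// Patches dist/index.js so that requiring the package returns a callable that
// constructs the default export, while the existing __esModule interop exports stay intact.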
const fs = require('fs');
const path = require('path');
const indexJs =
  process.env['DIST_PATH'] ?
    path.resolve(process.env['DIST_PATH'], 'index.js')
  : path.resolve(__dirname, '..', '..', 'dist', 'index.js');
let before = fs.readFileSync(indexJs, 'utf8');
let after = before.replace(
  /^(\s*Object\.defineProperty\s*\(exports,\s*["']__esModule["'].+)$/m,
  `exports = module.exports = function (...args) {
return new exports.default(...args)
}
$1`.replace(/^ /gm, ''),
);
fs.writeFileSync(indexJs, after, 'utf8');


@@ -0,0 +1,13 @@
#!/usr/bin/env bash
set -exuo pipefail
# the package is published to NPM from ./dist
# we want the final file structure for git installs to match the npm installs, so we
# delete everything except ./dist and ./node_modules
find . -maxdepth 1 -mindepth 1 ! -name 'dist' ! -name 'node_modules' -exec rm -rf '{}' +
# move everything from ./dist to .
mv dist/* .
# delete the now-empty ./dist
rmdir dist


@@ -0,0 +1,21 @@
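// Prints a publish-ready package.json for ./dist: strips the dist/ prefix from the
// entry points and exports map, and drops devDependencies and the prepack,
// prepublishOnly, and prepare scripts.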
const pkgJson = require(process.env['PKG_JSON_PATH'] || '../../package.json');
function processExportMap(m) {
  for (const key in m) {
    const value = m[key];
    if (typeof value === 'string') m[key] = value.replace(/^\.\/dist\//, './');
    else processExportMap(value);
  }
}
processExportMap(pkgJson.exports);
for (const key of ['types', 'main', 'module']) {
  if (typeof pkgJson[key] === 'string') pkgJson[key] = pkgJson[key].replace(/^(\.\/)?dist\//, './');
}
delete pkgJson.devDependencies;
delete pkgJson.scripts.prepack;
delete pkgJson.scripts.prepublishOnly;
delete pkgJson.scripts.prepare;
console.log(JSON.stringify(pkgJson, null, 2));


@@ -0,0 +1,94 @@
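// Post-processes the compiled output in dist/: blanks out build-time triple-slash
// references in the emitted type declarations and rewrites dist/package.json with
// an explicit exports map covering both the CJS and ESM entry points.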
// @ts-check
const fs = require('fs');
const path = require('path');
const distDir =
  process.env['DIST_PATH'] ?
    path.resolve(process.env['DIST_PATH'])
  : path.resolve(__dirname, '..', '..', 'dist');
async function* walk(dir) {
  for await (const d of await fs.promises.opendir(dir)) {
    const entry = path.join(dir, d.name);
    if (d.isDirectory()) yield* walk(entry);
    else if (d.isFile()) yield entry;
  }
}
async function postprocess() {
  for await (const file of walk(distDir)) {
    if (!/(\.d)?[cm]?ts$/.test(file)) continue;
    const code = await fs.promises.readFile(file, 'utf8');
    // strip out lib="dom", types="node", and types="react" references; these
    // are needed at build time, but would pollute the user's TS environment
    const transformed = code.replace(
      /^ *\/\/\/ *<reference +(lib="dom"|types="(node|react)").*?\n/gm,
      // replace with same number of characters to avoid breaking source maps
      (match) => ' '.repeat(match.length - 1) + '\n',
    );
    if (transformed !== code) {
      console.error(`wrote ${path.relative(process.cwd(), file)}`);
      await fs.promises.writeFile(file, transformed, 'utf8');
    }
  }
  // explicit CJS/ESM entry points for the package root...
  const newExports = {
    '.': {
      require: {
        types: './index.d.ts',
        default: './index.js',
      },
      types: './index.d.mts',
      default: './index.mjs',
    },
  };
  // ...plus wildcard subpath exports for every emitted directory and top-level file
  for (const entry of await fs.promises.readdir(distDir, { withFileTypes: true })) {
    if (entry.isDirectory() && entry.name !== 'src' && entry.name !== 'internal' && entry.name !== 'bin') {
      const subpath = './' + entry.name;
      newExports[subpath + '/*.mjs'] = {
        default: subpath + '/*.mjs',
      };
      newExports[subpath + '/*.js'] = {
        default: subpath + '/*.js',
      };
      newExports[subpath + '/*'] = {
        import: subpath + '/*.mjs',
        require: subpath + '/*.js',
      };
    } else if (entry.isFile() && /\.[cm]?js$/.test(entry.name)) {
      const { name, ext } = path.parse(entry.name);
      const subpathWithoutExt = './' + name;
      const subpath = './' + entry.name;
      newExports[subpathWithoutExt] ||= { import: undefined, require: undefined };
      const isModule = ext[1] === 'm';
      if (isModule) {
        newExports[subpathWithoutExt].import = subpath;
      } else {
        newExports[subpathWithoutExt].require = subpath;
      }
      newExports[subpath] = {
        default: subpath,
      };
    }
  }
  await fs.promises.writeFile(
    path.join(distDir, 'package.json'),
    JSON.stringify(
      Object.assign(
        /** @type {Record<string, unknown>} */ (
          JSON.parse(await fs.promises.readFile(path.join(distDir, 'package.json'), 'utf-8'))
        ),
        {
          exports: newExports,
        },
      ),
      null,
      2,
    ),
  );
}
postprocess();


@@ -0,0 +1,25 @@
#!/usr/bin/env bash
set -exuo pipefail
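# Expects the environment to provide URL (endpoint that returns a signed upload URL),
# AUTH (bearer token for that endpoint), and SHA (the commit SHA used in the install URL).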
RESPONSE=$(curl -X POST "$URL" \
-H "Authorization: Bearer $AUTH" \
-H "Content-Type: application/json")
SIGNED_URL=$(echo "$RESPONSE" | jq -r '.url')
if [[ "$SIGNED_URL" == "null" ]]; then
echo -e "\033[31mFailed to get signed URL.\033[0m"
exit 1
fi
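# Stream a gzipped tarball of ./dist to the signed URL; curl's verbose output is
# captured so the HTTP status can be checked below.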
UPLOAD_RESPONSE=$(tar -cz dist | curl -v -X PUT \
-H "Content-Type: application/gzip" \
--data-binary @- "$SIGNED_URL" 2>&1)
if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then
echo -e "\033[32mUploaded build to Stainless storage.\033[0m"
echo -e "\033[32mInstallation: npm install 'https://pkg.stainless.com/s/opencode-typescript/$SHA'\033[0m"
else
echo -e "\033[31mFailed to upload artifact.\033[0m"
exit 1
fi