2024-04-08 02:50:00 +09:00
parent e5ed214234
commit d3d786d2ef
88 changed files with 44125 additions and 1435 deletions

@@ -0,0 +1,131 @@
import util from "util";
import cp from "child_process";
import {parseVersion} from "./helpers/parser.js";
import githubAxios from "./githubAxios.js";
import memoize from 'memoizee';
const exec = util.promisify(cp.exec);
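// Thin wrapper around the GitHub REST API, scoped to a single owner/repo pair.
// Issue, PR, label and release helpers go through an axios instance derived from githubAxios;
// the static tag helpers shell out to the local git checkout instead.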
export default class GithubAPI {
constructor(owner, repo) {
if (!owner) {
throw new Error('repo owner must be specified');
}
if (!repo) {
throw new Error('repo must be specified');
}
this.repo = repo;
this.owner = owner;
this.axios = githubAxios.create({
baseURL: `https://api.github.com/repos/${this.owner}/${this.repo}/`,
})
}
async createComment(issue, body) {
return (await this.axios.post(`/issues/${issue}/comments`, {body})).data;
}
async getComments(issue, {desc = false, per_page = 100, page = 1} = {}) {
return (await this.axios.get(`/issues/${issue}/comments`, {params: {direction: desc ? 'desc' : 'asc', per_page, page}})).data;
}
async getComment(id) {
return (await this.axios.get(`/issues/comments/${id}`)).data;
}
async updateComment(id, body) {
return (await this.axios.patch(`/issues/comments/${id}`, {body})).data;
}
async appendLabels(issue, labels) {
return (await this.axios.post(`/issues/${issue}/labels`, {labels})).data;
}
async getUser(user) {
return (await githubAxios.get(`/users/${user}`)).data;
}
async isCollaborator(user) {
try {
return (await this.axios.get(`/collaborators/${user}`)).status === 204;
} catch (e) {
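// GitHub responds with 404 when the user is not a collaborator; swallow the error and return undefined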
}
}
async deleteLabel(issue, label) {
return (await this.axios.delete(`/issues/${issue}/labels/${label}`)).data;
}
async getIssue(issue) {
return (await this.axios.get(`/issues/${issue}`)).data;
}
async getPR(issue) {
return (await this.axios.get(`/pulls/${issue}`)).data;
}
async getIssues({state = 'open', labels, sort = 'created', desc = false, per_page = 100, page = 1}) {
return (await this.axios.get(`/issues`, {params: {state, labels, sort, direction: desc ? 'desc' : 'asc', per_page, page}})).data;
}
async updateIssue(issue, data) {
return (await this.axios.patch(`/issues/${issue}`, data)).data;
}
async closeIssue(issue) {
return this.updateIssue(issue, {
state: "closed"
})
}
async getReleases({per_page = 30, page = 1} = {}) {
return (await this.axios.get(`/releases`, {params: {per_page, page}})).data;
}
async getRelease(release = 'latest') {
return (await this.axios.get(parseVersion(release) ? `/releases/tags/${release}` : `/releases/${release}`)).data;
}
async getTags({per_page = 30, page = 1} = {}) {
return (await this.axios.get(`/tags`, {params: {per_page, page}})).data;
}
async reopenIssue(issue) {
return this.updateIssue(issue, {
state: "open"
})
}
static async getTagRef(tag) {
try {
return (await exec(`git show-ref --tags "refs/tags/${tag}"`)).stdout.split(' ')[0];
} catch (e) {
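// git show-ref exits non-zero when the tag does not exist locally; return undefined in that case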
}
}
static async getLatestTag() {
try {
const {stdout} = await exec(`git for-each-ref refs/tags --sort=-taggerdate --format='%(refname)' --count=1`);
return stdout.split('/').pop();
} catch (e) {}
}
static normalizeTag(tag) {
return tag ? 'v' + tag.replace(/^v/, '') : '';
}
}
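// getUser and isCollaborator are memoized (promise-aware) to avoid repeated API lookups;
// the raw HTTP verbs and isAxiosError are exposed as simple pass-throughs to this.axios.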
const {prototype} = GithubAPI;
['getUser', 'isCollaborator'].forEach(methodName => {
prototype[methodName] = memoize(prototype[methodName], { promise: true })
});
['get', 'post', 'put', 'delete', 'isAxiosError'].forEach((method) => prototype[method] = function(...args){
return this.axios[method](...args);
});

@@ -0,0 +1,128 @@
import GithubAPI from "./GithubAPI.js";
import api from './api.js';
import Handlebars from "handlebars";
import fs from "fs/promises";
import {colorize} from "./helpers/colorize.js";
import {getReleaseInfo} from "./contributors.js";
import path from "path";
import {fileURLToPath} from "url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const NOTIFY_PR_TEMPLATE = path.resolve(__dirname, '../templates/pr_published.hbs');
const normalizeTag = (tag) => tag ? 'v' + tag.replace(/^v/, '') : '';
const GITHUB_BOT_LOGIN = 'github-actions[bot]';
const skipCollaboratorPRs = true;
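// Notifies merged PRs once they ship in a release: adds the release tag as a label and,
// unless the PR was opened by a bot, labeled "automated pr" or authored by a collaborator
// (skipCollaboratorPRs), posts a templated "published in" comment.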
class RepoBot {
constructor(options) {
const {
owner, repo,
templates
} = options || {};
this.templates = Object.assign({
published: NOTIFY_PR_TEMPLATE
}, templates);
this.github = api || new GithubAPI(owner, repo);
this.owner = this.github.owner;
this.repo = this.github.repo;
}
async addComment(targetId, message) {
return this.github.createComment(targetId, message);
}
async notifyPRPublished(id, tag) {
let pr;
try {
pr = await this.github.getPR(id);
} catch (err) {
if(err.response?.status === 404) {
throw new Error(`PR #${id} not found (404)`);
}
throw err;
}
tag = normalizeTag(tag);
const {merged, labels, user: {login, type}} = pr;
const isBot = type === 'Bot';
if (!merged) {
return false
}
await this.github.appendLabels(id, [tag]);
if (isBot || labels.find(({name}) => name === 'automated pr') || (skipCollaboratorPRs && await this.github.isCollaborator(login))) {
return false;
}
const comments = await this.github.getComments(id, {desc: true});
const comment = comments.find(
({body, user}) => user.login === GITHUB_BOT_LOGIN && body.indexOf('published in') >= 0
)
if (comment) {
console.log(colorize()`Release comment [${comment.html_url}] already exists in #${id}`);
return false;
}
const author = await this.github.getUser(login);
author.isBot = isBot;
const message = await this.constructor.renderTemplate(this.templates.published, {
id,
author,
release: {
tag,
url: `https://github.com/${this.owner}/${this.repo}/releases/tag/${tag}`
}
});
return await this.addComment(id, message);
}
async notifyPublishedPRs(tag) {
tag = normalizeTag(tag);
const release = await getReleaseInfo(tag);
if (!release) {
throw new Error(colorize()`Can't get release info for ${tag}`);
}
const {merges} = release;
console.log(colorize()`Found ${merges.length} PRs in ${tag}:`);
let i = 0;
for (const pr of merges) {
try {
console.log(colorize()`${i++}) Notify PR #${pr.id}`)
const result = await this.notifyPRPublished(pr.id, tag);
console.log('✔️', result ? 'Label, comment' : 'Label');
} catch (err) {
console.warn(colorize('green', 'red')`❌ Failed to notify PR ${pr.id}: ${err.message}`);
}
}
}
static async renderTemplate(template, data) {
return Handlebars.compile(String(await fs.readFile(template)))(data);
}
}
export default RepoBot;

@@ -0,0 +1,28 @@
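// CLI entry point: takes a --tag argument (falling back to the version in package.json)
// and asks RepoBot to notify every PR that was merged into that release.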
import minimist from "minimist";
import RepoBot from '../RepoBot.js';
import fs from 'fs/promises';
const argv = minimist(process.argv.slice(2));
console.log(argv);
let {tag} = argv;
(async() => {
if (!tag || tag === true) {
const {version} = JSON.parse((await fs.readFile('./package.json')).toString());
tag = 'v' + version;
} else if (typeof tag !== 'string') {
throw new Error('tag must be a string');
}
const bot = new RepoBot();
try {
await bot.notifyPublishedPRs(tag);
} catch (err) {
console.warn('Error:', err.message);
}
})();

@@ -0,0 +1,3 @@
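// Shared GithubAPI instance bound to the axios/axios repository.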
import GithubAPI from "./GithubAPI.js";
export default new GithubAPI('axios', 'axios');

@@ -0,0 +1,29 @@
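// Sanity check: the version in package.json must match axios.VERSION in both the source
// entry point and the built CommonJS bundle.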
import fs from 'fs';
import assert from 'assert';
import axios from '../index.js';
import axiosBuild from '../dist/node/axios.cjs';
const {version} = JSON.parse(fs.readFileSync('./package.json'));
console.log('Checking versions...\n----------------------------')
console.log(`Package version: v${version}`);
console.log(`Axios version: v${axios.VERSION}`);
console.log(`Axios build version: v${axiosBuild.VERSION}`);
console.log(`----------------------------`);
assert.strictEqual(
version,
axios.VERSION,
`Version mismatch between package and Axios ${version} != ${axios.VERSION}`
);
assert.strictEqual(
version,
axiosBuild.VERSION,
`Version mismatch between package and build ${version} != ${axiosBuild.VERSION}`
);
console.log('✔️ PASSED\n');

@@ -0,0 +1,241 @@
import axios from "./githubAxios.js";
import util from "util";
import cp from "child_process";
import Handlebars from "handlebars";
import fs from "fs/promises";
import {colorize} from "./helpers/colorize.js";
const exec = util.promisify(cp.exec);
const ONE_MB = 1024 * 1024;
const removeExtraLineBreaks = (str) => str.replace(/(?:\r\n|\r|\n){3,}/gm, '\r\n\r\n');
const cleanTemplate = template => template
.replace(/\n +/g, '\n')
.replace(/^ +/, '')
.replace(/\n\n\n+/g, '\n\n')
.replace(/\n\n$/, '\n');
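// Resolves a commit SHA to its GitHub author profile via the commits API,
// caching results per SHA (failed lookups are cached as null).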
const getUserFromCommit = ((commitCache) => async (sha) => {
try {
if(commitCache[sha] !== undefined) {
return commitCache[sha];
}
console.log(colorize()`fetch github commit info (${sha})`);
const {data} = await axios.get(`https://api.github.com/repos/axios/axios/commits/${sha}`);
return commitCache[sha] = {
...data.commit.author,
...data.author,
avatar_url_sm: data.author.avatar_url ? data.author.avatar_url + '&s=18' : '',
};
} catch (err) {
return commitCache[sha] = null;
}
})({});
const getIssueById = ((cache) => async (id) => {
if(cache[id] !== undefined) {
return cache[id];
}
try {
const {data} = await axios.get(`https://api.github.com/repos/axios/axios/issues/${id}`);
return cache[id] = data;
} catch (err) {
return null;
}
})({});
const getUserInfo = ((userCache) => async (userEntry) => {
const {email, commits} = userEntry;
if (userCache[email] !== undefined) {
return userCache[email];
}
console.log(colorize()`fetch github user info [${userEntry.name}]`);
return userCache[email] = {
...userEntry,
...await getUserFromCommit(commits[0].hash)
}
})({});
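// Merges author entries that share the same GitHub login so one person is not listed twice,
// accumulating their insertion/deletion/point counts.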
const deduplicate = (authors) => {
const loginsMap = {};
const combined = {};
const assign = (a, b) => {
const {insertions, deletions, points, ...rest} = b;
Object.assign(a, rest);
a.insertions += insertions;
a.deletions += deletions;
a.points += points;
}
for(const [email, user] of Object.entries(authors)) {
const {login} = user;
let entry;
if(login && (entry = loginsMap[login])) {
assign(entry, user);
} else {
login && (loginsMap[login] = user);
combined[email] = user;
}
}
return combined;
}
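// Builds release metadata (commits, merged PRs, per-author stats) by running auto-changelog
// with its JSON template and post-processing the output; results are cached per tag.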
const getReleaseInfo = ((releaseCache) => async (tag) => {
if(releaseCache[tag] !== undefined) {
return releaseCache[tag];
}
const isUnreleasedTag = !tag;
const version = 'v' + tag.replace(/^v/, '');
const command = isUnreleasedTag ?
`npx auto-changelog --unreleased-only --stdout --commit-limit false --template json` :
`npx auto-changelog ${
version ? '--starting-version ' + version + ' --ending-version ' + version : ''
} --stdout --commit-limit false --template json`;
console.log(command);
const {stdout} = await exec(command, {maxBuffer: 10 * ONE_MB});
const release = JSON.parse(stdout)[0];
if(release) {
const authors = {};
const commits = [
...release.commits,
...release.fixes.map(fix => fix.commit),
...release.merges.map(fix => fix.commit)
].filter(Boolean);
const commitMergeMap = {};
for(const merge of release.merges) {
commitMergeMap[merge.commit.hash] = merge.id;
}
for (const {hash, author, email, insertions, deletions} of commits) {
const entry = authors[email] = (authors[email] || {
name: author,
prs: [],
email,
commits: [],
insertions: 0, deletions: 0
});
entry.commits.push({hash});
let pr;
if((pr = commitMergeMap[hash])) {
entry.prs.push(pr);
}
console.log(colorize()`Found commit [${hash}]`);
entry.displayName = entry.name || author || entry.login;
entry.github = entry.login ? `https://github.com/${encodeURIComponent(entry.login)}` : '';
entry.insertions += insertions;
entry.deletions += deletions;
entry.points = entry.insertions + entry.deletions;
}
for (const [email, author] of Object.entries(authors)) {
const entry = authors[email] = await getUserInfo(author);
entry.isBot = entry.type === "Bot";
}
release.authors = Object.values(deduplicate(authors))
.sort((a, b) => b.points - a.points);
release.allCommits = commits;
}
releaseCache[tag] = release;
return release;
})({});
const renderContributorsList = async (tag, template) => {
const release = await getReleaseInfo(tag);
const compile = Handlebars.compile(String(await fs.readFile(template)))
const content = compile(release);
return removeExtraLineBreaks(cleanTemplate(content));
}
const renderPRsList = async (tag, template, {comments_threshold = 5, awesome_threshold = 5, label = 'add_to_changelog'} = {}) => {
const release = await getReleaseInfo(tag);
const prs = {};
for(const merge of release.merges) {
const pr = await getIssueById(merge.id);
if (pr && pr.labels.find(({name})=> name === label)) {
const {reactions, body} = pr;
prs[pr.number] = pr;
pr.isHot = pr.comments > comments_threshold;
const points = reactions['+1'] +
reactions['hooray'] + reactions['rocket'] + reactions['heart'] + reactions['laugh'] - reactions['-1'];
pr.isAwesome = points > awesome_threshold;
let match;
pr.messages = [];
if (body) {
const reg = /```+changelog\n*(.+?)?\n*```/gms;
while((match = reg.exec(body))) {
match[1] && pr.messages.push(match[1]);
}
}
}
}
release.prs = Object.values(prs);
const compile = Handlebars.compile(String(await fs.readFile(template)))
const content = compile(release);
return removeExtraLineBreaks(cleanTemplate(content));
}
const getTagRef = async (tag) => {
try {
return (await exec(`git show-ref --tags "refs/tags/${tag}"`)).stdout.split(' ')[0];
} catch(e) {
}
}
export {
renderContributorsList,
getReleaseInfo,
renderPRsList,
getTagRef
}

@@ -0,0 +1,19 @@
import axios from '../index.js';
import {colorize} from "./helpers/colorize.js";
const {GITHUB_TOKEN} = process.env;
GITHUB_TOKEN ? console.log(`[GITHUB_TOKEN OK]`) : console.warn(`[GITHUB_TOKEN is not defined]`);
const defaultTransform = axios.defaults.transformRequest;
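// Pre-configured axios instance for api.github.com: logs every request (method and path)
// and sends GITHUB_TOKEN as an Authorization header when it is defined.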
export default axios.create({
transformRequest: [defaultTransform[0], function (data) {
console.log(colorize()`[${this.method.toUpperCase()}] Request [${new URL(axios.getUri(this)).pathname}]`);
return data;
}],
baseURL: 'https://api.github.com/',
headers: {
Authorization: GITHUB_TOKEN ? `token ${GITHUB_TOKEN}` : null
}
});

@@ -0,0 +1,14 @@
import chalk from 'chalk';
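// Tagged-template helper that colorizes interpolated values, cycling through the given
// chalk colors (or a default palette when none are passed).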
export const colorize = (...colors)=> {
if(!colors.length) {
colors = ['green', 'cyan', 'magenta', 'blue', 'yellow', 'red'];
}
const colorsCount = colors.length;
return (strings, ...values) => {
const {length} = values;
return strings.map((str, i) => i < length ? str + chalk[colors[i%colorsCount]].bold(values[i]) : str).join('');
}
}

@@ -0,0 +1,12 @@
export const matchAll = (text, regexp, cb) => {
let match;
while((match = regexp.exec(text))) {
cb(match);
}
}
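// Runs cb for every markdown section whose heading matches `name`; the back-reference \1
// delimits the section at the next heading of the same level.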
export const parseSection = (body, name, cb) => {
matchAll(body, new RegExp(`^(#+)\\s+${name}?(.*?)^\\1\\s+\\w+`, 'gims'), cb);
}
export const parseVersion = (rawVersion) => /^v?(\d+).(\d+).(\d+)/.exec(rawVersion);

@@ -0,0 +1,78 @@
import fs from 'fs/promises';
import path from 'path';
import {renderContributorsList, getTagRef, renderPRsList} from './contributors.js';
import asyncReplace from 'string-replace-async';
import {fileURLToPath} from "url";
import {colorize} from "./helpers/colorize.js";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTRIBUTORS_TEMPLATE = path.resolve(__dirname, '../templates/contributors.hbs');
const PRS_TEMPLATE = path.resolve(__dirname, '../templates/prs.hbs');
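// Walks the release headings in CHANGELOG.md and, for every release that is missing the given
// section, renders it with the supplied injector and inserts it at the end of that release's block.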
const injectSection = async (name, contributorsRE, injector, infile = '../CHANGELOG.md') => {
console.log(colorize()`Checking ${name} sections in ${infile}`);
infile = path.resolve(__dirname, infile);
const content = String(await fs.readFile(infile));
const headerRE = /^#+\s+\[([-_\d.\w]+)].+?$/mig;
let tag;
let index = 0;
let isFirstTag = true;
const newContent = await asyncReplace(content, headerRE, async (match, nextTag, offset) => {
const releaseContent = content.slice(index, offset);
const hasSection = contributorsRE.test(releaseContent);
const currentTag = tag;
tag = nextTag;
index = offset + match.length;
if(currentTag) {
if (hasSection) {
console.log(colorize()`[${currentTag}]: ✓ OK`);
} else {
const target = isFirstTag && (!await getTagRef(currentTag)) ? '' : currentTag;
console.log(colorize()`[${currentTag}]: ❌ MISSED` + (!target ? ' (UNRELEASED)' : ''));
isFirstTag = false;
console.log(`Generating section...`);
const section = await injector(target);
if (!section) {
return match;
}
console.log(colorize()`\nRENDERED SECTION [${name}] for [${currentTag}]:`);
console.log('-------------BEGIN--------------\n');
console.log(section);
console.log('--------------END---------------\n');
return section + '\n' + match;
}
}
return match;
});
await fs.writeFile(infile, newContent);
}
await injectSection(
'PRs',
/^\s*### PRs/mi,
(tag) => tag ? '' : renderPRsList(tag, PRS_TEMPLATE, {awesome_threshold: 5, comments_threshold: 7}),
);
await injectSection(
'contributors',
/^\s*### Contributors/mi,
(tag) => renderContributorsList(tag, CONTRIBUTORS_TEMPLATE)
);

@@ -0,0 +1,75 @@
import util from "util";
import cp from "child_process";
import Handlebars from "handlebars";
import fs from "fs/promises";
import prettyBytes from 'pretty-bytes';
import {gzipSize} from 'gzip-size';
const exec = util.promisify(cp.exec);
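// Generates a PR body (from templates/pr.hbs) reporting the current raw and gzipped size of the
// listed dist bundles, plus how each size changed across the most recent tagged releases.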
const getBlobHistory = async (filepath, maxCount = 5) => {
const log = (await exec(
`git log --max-count=${maxCount} --no-walk --tags=v* --oneline --format=%H%d -- ${filepath}`
)).stdout;
const commits = [];
let match;
const regexp = /^(\w+) \(tag: (v?[.\d]+)\)$/gm;
while((match = regexp.exec(log))) {
commits.push({
sha: match[1],
tag: match[2],
size: await getBlobSize(filepath, match[1])
})
}
return commits;
}
const getBlobSize = async (filepath, sha = 'HEAD') => {
const size = (await exec(
`git cat-file -s ${sha}:${filepath}`
)).stdout;
return size ? +size : 0;
}
const generateFileReport = async (files) => {
const stat = {};
for(const [name, file] of Object.entries(files)) {
const commits = await getBlobHistory(file);
stat[file] = {
name,
size: (await fs.stat(file)).size,
path: file,
gzip: await gzipSize(String(await fs.readFile(file))),
commits,
history: commits.map(({tag, size}) => `${prettyBytes(size)} (${tag})`).join(' ← ')
}
}
return stat;
}
const generateBody = async ({files, template = './templates/pr.hbs'} = {}) => {
const data = {
files: await generateFileReport(files)
};
Handlebars.registerHelper('filesize', (bytes)=> prettyBytes(bytes));
return Handlebars.compile(String(await fs.readFile(template)))(data);
}
console.log(await generateBody({
files: {
'Browser build (UMD)' : './dist/axios.min.js',
'Browser build (ESM)' : './dist/esm/axios.min.js',
}
}));

@@ -0,0 +1,22 @@
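// Test runner wrapper: on Node.js 17+ the given command is executed through cross-env with
// NODE_OPTIONS=--openssl-legacy-provider to keep legacy OpenSSL algorithms available, and the
// child's exit code is propagated.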
import {spawn} from 'child_process';
const args = process.argv.slice(2);
console.log(`Running ${args.join(' ')} on ${process.version}\n`);
const match = /v(\d+)/.exec(process.version);
const isHotfixNeeded = match && match[1] > 16;
isHotfixNeeded && console.warn('Setting --openssl-legacy-provider as ssl hotfix');
const test = spawn('cross-env',
isHotfixNeeded ? ['NODE_OPTIONS=--openssl-legacy-provider', ...args] : args, {
shell: true,
stdio: 'inherit'
}
);
test.on('exit', function (code) {
process.exit(code)
})