refactor(eslint): enable @typescript-eslint/no-use-before-define

This commit is contained in:
Rhys Arkins 2019-11-24 05:09:13 +01:00
Родитель 33079e7ec5
Коммит 473b7e1f4b
34 изменённых файлов: 1903 добавлений и 1908 удалений

Просмотреть файл

@ -27,7 +27,6 @@ module.exports = {
// TODO: fix lint
'@typescript-eslint/camelcase': 'off', // disabled until ??
'@typescript-eslint/no-use-before-define': 1,
'@typescript-eslint/no-explicit-any': 0,
'@typescript-eslint/no-non-null-assertion': 0,
'@typescript-eslint/no-unused-vars': [

Просмотреть файл

@ -15,6 +15,126 @@ const datasources = {
gitlab,
};
export function replaceArgs(
obj: string | string[] | object | object[],
argMapping: Record<string, any>
) {
if (is.string(obj)) {
let returnStr = obj;
for (const [arg, argVal] of Object.entries(argMapping)) {
const re = regEx(`{{${arg}}}`, 'g');
returnStr = returnStr.replace(re, argVal);
}
return returnStr;
}
if (is.array(obj)) {
const returnArray = [];
for (const item of obj) {
returnArray.push(replaceArgs(item, argMapping));
}
return returnArray;
}
if (is.object(obj)) {
const returnObj = {};
for (const [key, val] of Object.entries(obj)) {
returnObj[key] = replaceArgs(val, argMapping);
}
return returnObj;
}
return obj;
}
export function parsePreset(input: string): ParsedPreset {
  let remaining = input;
  let datasource: string;
  let packageName: string;
  let presetName: string;
  let params: string[];
  // A "github>"/"gitlab>" prefix selects the hosting datasource; "npm>" is
  // stripped because npm is the fallback default anyway.
  if (remaining.startsWith('github>')) {
    datasource = 'github';
    remaining = remaining.substring('github>'.length);
  } else if (remaining.startsWith('gitlab>')) {
    datasource = 'gitlab';
    remaining = remaining.substring('gitlab>'.length);
  }
  remaining = remaining.replace(/^npm>/, '');
  datasource = datasource || 'npm';
  // Extract "(arg1, arg2)"-style positional parameters, if present.
  const parenIndex = remaining.indexOf('(');
  if (parenIndex !== -1) {
    params = remaining
      .slice(parenIndex + 1, -1)
      .split(',')
      .map(elem => elem.trim());
    remaining = remaining.slice(0, parenIndex);
  }
  if (remaining[0] === ':') {
    // default namespace
    packageName = 'renovate-config-default';
    presetName = remaining.slice(1);
  } else if (remaining[0] === '@') {
    // scoped namespace
    [, packageName] = remaining.match(/(@.*?)(:|$)/);
    remaining = remaining.slice(packageName.length);
    if (!packageName.includes('/')) {
      packageName += '/renovate-config';
    }
    presetName = remaining === '' ? 'default' : remaining.slice(1);
  } else {
    // non-scoped namespace
    [, packageName] = remaining.match(/(.*?)(:|$)/);
    presetName = remaining.slice(packageName.length + 1);
    if (datasource === 'npm' && !packageName.startsWith('renovate-config-')) {
      packageName = `renovate-config-${packageName}`;
    }
    if (presetName === '') {
      presetName = 'default';
    }
  }
  return { datasource, packageName, presetName, params };
}
// Resolve a single preset string into its (migrated, massaged) config.
// Delegates fetching to the datasource selected by parsePreset().
export async function getPreset(preset: string): Promise<RenovateConfig> {
  logger.trace(`getPreset(${preset})`);
  const { datasource, packageName, presetName, params } = parsePreset(preset);
  let presetConfig = await datasources[datasource].getPreset(
    packageName,
    presetName
  );
  logger.trace({ presetConfig }, `Found preset ${preset}`);
  // Positional params from "preset(a, b)" are applied as {{arg0}}, {{arg1}}, ...
  if (params) {
    const argMapping = {};
    for (const [index, value] of params.entries()) {
      argMapping[`arg${index}`] = value;
    }
    presetConfig = replaceArgs(presetConfig, argMapping);
  }
  logger.trace({ presetConfig }, `Applied params to preset ${preset}`);
  const presetKeys = Object.keys(presetConfig);
  if (
    presetKeys.length === 2 &&
    presetKeys.includes('description') &&
    presetKeys.includes('extends')
  ) {
    // preset is just a collection of other presets
    delete presetConfig.description;
  }
  const packageListKeys = [
    'description',
    'packageNames',
    'excludePackageNames',
    'packagePatterns',
    'excludePackagePatterns',
  ];
  // Presets that are purely package lists also drop their description.
  if (presetKeys.every(key => packageListKeys.includes(key))) {
    delete presetConfig.description;
  }
  // Migrate deprecated options, then massage into canonical config shape.
  const { migratedConfig } = migration.migrateConfig(presetConfig);
  return massage.massageConfig(migratedConfig);
}
export async function resolveConfigPresets(
inputConfig: RenovateConfig,
ignorePresets?: string[],
@ -120,129 +240,9 @@ export async function resolveConfigPresets(
return config;
}
export function replaceArgs(
obj: string | string[] | object | object[],
argMapping: Record<string, any>
) {
if (is.string(obj)) {
let returnStr = obj;
for (const [arg, argVal] of Object.entries(argMapping)) {
const re = regEx(`{{${arg}}}`, 'g');
returnStr = returnStr.replace(re, argVal);
}
return returnStr;
}
if (is.array(obj)) {
const returnArray = [];
for (const item of obj) {
returnArray.push(replaceArgs(item, argMapping));
}
return returnArray;
}
if (is.object(obj)) {
const returnObj = {};
for (const [key, val] of Object.entries(obj)) {
returnObj[key] = replaceArgs(val, argMapping);
}
return returnObj;
}
return obj;
}
export interface ParsedPreset {
datasource: string;
packageName: string;
presetName: string;
params?: string[];
}
export function parsePreset(input: string): ParsedPreset {
let str = input;
let datasource: string;
let packageName: string;
let presetName: string;
let params: string[];
if (str.startsWith('github>')) {
datasource = 'github';
str = str.substring('github>'.length);
} else if (str.startsWith('gitlab>')) {
datasource = 'gitlab';
str = str.substring('gitlab>'.length);
}
str = str.replace(/^npm>/, '');
datasource = datasource || 'npm';
if (str.includes('(')) {
params = str
.slice(str.indexOf('(') + 1, -1)
.split(',')
.map(elem => elem.trim());
str = str.slice(0, str.indexOf('('));
}
if (str[0] === ':') {
// default namespace
packageName = 'renovate-config-default';
presetName = str.slice(1);
} else if (str[0] === '@') {
// scoped namespace
[, packageName] = str.match(/(@.*?)(:|$)/);
str = str.slice(packageName.length);
if (!packageName.includes('/')) {
packageName += '/renovate-config';
}
if (str === '') {
presetName = 'default';
} else {
presetName = str.slice(1);
}
} else {
// non-scoped namespace
[, packageName] = str.match(/(.*?)(:|$)/);
presetName = str.slice(packageName.length + 1);
if (datasource === 'npm' && !packageName.startsWith('renovate-config-')) {
packageName = `renovate-config-${packageName}`;
}
if (presetName === '') {
presetName = 'default';
}
}
return { datasource, packageName, presetName, params };
}
export async function getPreset(preset: string): Promise<RenovateConfig> {
logger.trace(`getPreset(${preset})`);
const { datasource, packageName, presetName, params } = parsePreset(preset);
let presetConfig = await datasources[datasource].getPreset(
packageName,
presetName
);
logger.trace({ presetConfig }, `Found preset ${preset}`);
if (params) {
const argMapping = {};
for (const [index, value] of params.entries()) {
argMapping[`arg${index}`] = value;
}
presetConfig = replaceArgs(presetConfig, argMapping);
}
logger.trace({ presetConfig }, `Applied params to preset ${preset}`);
const presetKeys = Object.keys(presetConfig);
if (
presetKeys.length === 2 &&
presetKeys.includes('description') &&
presetKeys.includes('extends')
) {
// preset is just a collection of other presets
delete presetConfig.description;
}
const packageListKeys = [
'description',
'packageNames',
'excludePackageNames',
'packagePatterns',
'excludePackagePatterns',
];
if (presetKeys.every(key => packageListKeys.includes(key))) {
delete presetConfig.description;
}
const { migratedConfig } = migration.migrateConfig(presetConfig);
return massage.massageConfig(migratedConfig);
}

Просмотреть файл

@ -48,6 +48,38 @@ export function getRegistryRepository(
};
}
// Fetch a registry authorization token from AWS ECR for the given region.
// hostRules username/password are mapped onto AWS access keys when present;
// otherwise the SDK's default credential chain applies.
// Resolves null (never rejects) on any failure so callers can fall through.
function getECRAuthToken(region: string, opts: hostRules.HostRule) {
  const config = { region, accessKeyId: undefined, secretAccessKey: undefined };
  if (opts.username && opts.password) {
    config.accessKeyId = opts.username;
    config.secretAccessKey = opts.password;
  }
  const ecr = new AWS.ECR(config);
  // Wrap the callback-style SDK call in a promise.
  return new Promise<string>(resolve => {
    ecr.getAuthorizationToken({}, (err, data) => {
      if (err) {
        logger.trace({ err }, 'err');
        logger.info('ECR getAuthorizationToken error');
        resolve(null);
      } else {
        // Defensively walk the response shape; any missing link yields null.
        const authorizationToken =
          data &&
          data.authorizationData &&
          data.authorizationData[0] &&
          data.authorizationData[0].authorizationToken;
        if (authorizationToken) {
          resolve(authorizationToken);
        } else {
          logger.warn(
            'Could not extract authorizationToken from ECR getAuthorizationToken response'
          );
          resolve(null);
        }
      }
    });
  });
}
async function getAuthHeaders(
registry: string,
repository: string
@ -376,6 +408,34 @@ async function getTags(
}
}
// GET the given URL, with a redirect hook that handles AWS pre-signed URLs.
export function getConfigResponse(url: string, headers: OutgoingHttpHeaders) {
  const handleRedirect = (options: any) => {
    const isAmazonPresigned =
      options.search && options.search.indexOf('X-Amz-Algorithm') !== -1;
    if (isAmazonPresigned) {
      // if there is no port in the redirect URL string, then delete it from the redirect options.
      // This can be evaluated for removal after upgrading to Got v10
      const portInUrl = options.href.split('/')[2].split(':')[1];
      if (!portInUrl) {
        // eslint-disable-next-line no-param-reassign
        delete options.port; // Redirect will instead use 80 or 443 for HTTP or HTTPS respectively
      }
      // docker registry is hosted on amazon, redirect url includes authentication.
      // eslint-disable-next-line no-param-reassign
      delete options.headers.authorization;
    }
  };
  return got(url, {
    headers,
    hooks: { beforeRedirect: [handleRedirect] },
  });
}
/*
* docker.getLabels
*
@ -496,34 +556,6 @@ async function getLabels(
}
}
export function getConfigResponse(url: string, headers: OutgoingHttpHeaders) {
return got(url, {
headers,
hooks: {
beforeRedirect: [
(options: any) => {
if (
options.search &&
options.search.indexOf('X-Amz-Algorithm') !== -1
) {
// if there is no port in the redirect URL string, then delete it from the redirect options.
// This can be evaluated for removal after upgrading to Got v10
const portInUrl = options.href.split('/')[2].split(':')[1];
if (!portInUrl) {
// eslint-disable-next-line no-param-reassign
delete options.port; // Redirect will instead use 80 or 443 for HTTP or HTTPS respectively
}
// docker registry is hosted on amazon, redirect url includes authentication.
// eslint-disable-next-line no-param-reassign
delete options.headers.authorization;
}
},
],
},
});
}
/**
* docker.getPkgReleases
*
@ -562,35 +594,3 @@ export async function getPkgReleases({
}
return ret;
}
function getECRAuthToken(region: string, opts: hostRules.HostRule) {
const config = { region, accessKeyId: undefined, secretAccessKey: undefined };
if (opts.username && opts.password) {
config.accessKeyId = opts.username;
config.secretAccessKey = opts.password;
}
const ecr = new AWS.ECR(config);
return new Promise<string>(resolve => {
ecr.getAuthorizationToken({}, (err, data) => {
if (err) {
logger.trace({ err }, 'err');
logger.info('ECR getAuthorizationToken error');
resolve(null);
} else {
const authorizationToken =
data &&
data.authorizationData &&
data.authorizationData[0] &&
data.authorizationData[0].authorizationToken;
if (authorizationToken) {
resolve(authorizationToken);
} else {
logger.warn(
'Could not extract authorizationToken from ECR getAuthorizationToken response'
);
resolve(null);
}
}
});
});
}

Просмотреть файл

@ -7,6 +7,26 @@ const glGot = api.get;
const GitLabApiUrl = 'https://gitlab.com/api/v4/projects';
// Query the GitLab branches API and return the repo's default branch name.
// Falls back to 'master' when no branch is flagged as default.
async function getDefaultBranchName(urlEncodedPkgName: string) {
  const branchesUrl = `${GitLabApiUrl}/${urlEncodedPkgName}/repository/branches`;
  type GlBranch = {
    default: boolean;
    name: string;
  }[];
  const res = await glGot<GlBranch>(branchesUrl);
  const branches = res.body;
  // Fix: previously a misspelled local ("defautlBranchName") and a manual
  // break-loop; find() expresses the same first-match semantics directly.
  const defaultBranch = branches.find(branch => branch.default);
  return defaultBranch ? defaultBranch.name : 'master';
}
export async function getPreset(
pkgName: string,
presetName = 'default'
@ -116,23 +136,3 @@ export async function getPkgReleases({
);
return dependency;
}
async function getDefaultBranchName(urlEncodedPkgName: string) {
const branchesUrl = `${GitLabApiUrl}/${urlEncodedPkgName}/repository/branches`;
type GlBranch = {
default: boolean;
name: string;
}[];
const res = await glGot<GlBranch>(branchesUrl);
const branches = res.body;
let defautlBranchName = 'master';
for (const branch of branches) {
if (branch.default) {
defautlBranchName = branch.name;
break;
}
}
return defautlBranchName;
}

Просмотреть файл

@ -4,35 +4,6 @@ import { PkgReleaseConfig, ReleaseResult } from '../common';
import got from '../../util/got';
import { logger } from '../../logger';
// Look up a helm chart's releases in the repository's index.yaml.
// Returns null (after a warning) when inputs are missing or lookup fails.
export async function getPkgReleases({
  lookupName,
  registryUrls,
}: PkgReleaseConfig): Promise<ReleaseResult | null> {
  if (!lookupName) {
    logger.warn(`lookupName was not provided to getPkgReleases`);
    return null;
  }
  // Only the first configured registry URL is consulted.
  const [helmRepository] = registryUrls;
  if (!helmRepository) {
    logger.warn(`helmRepository was not provided to getPkgReleases`);
    return null;
  }
  const repositoryData = await getRepositoryData(helmRepository);
  if (!repositoryData) {
    logger.warn(`Couldn't get index.yaml file from ${helmRepository}`);
    return null;
  }
  const releases = repositoryData.find(chart => chart.name === lookupName);
  if (!releases) {
    logger.warn(
      { dependency: lookupName },
      `Entry ${lookupName} doesn't exist in index.yaml from ${helmRepository}`
    );
    return null;
  }
  return releases;
}
export async function getRepositoryData(
repository: string
): Promise<ReleaseResult[]> {
@ -89,3 +60,32 @@ export async function getRepositoryData(
return null;
}
}
export async function getPkgReleases({
lookupName,
registryUrls,
}: PkgReleaseConfig): Promise<ReleaseResult | null> {
if (!lookupName) {
logger.warn(`lookupName was not provided to getPkgReleases`);
return null;
}
const [helmRepository] = registryUrls;
if (!helmRepository) {
logger.warn(`helmRepository was not provided to getPkgReleases`);
return null;
}
const repositoryData = await getRepositoryData(helmRepository);
if (!repositoryData) {
logger.warn(`Couldn't get index.yaml file from ${helmRepository}`);
return null;
}
const releases = repositoryData.find(chart => chart.name === lookupName);
if (!releases) {
logger.warn(
{ dependency: lookupName },
`Entry ${lookupName} doesn't exist in index.yaml from ${helmRepository}`
);
return null;
}
return releases;
}

Просмотреть файл

@ -57,6 +57,36 @@ const datasources: Record<string, Datasource> = {
const cacheNamespace = 'datasource-releases';
// Dispatch a release lookup to the configured datasource and attach metadata.
// Returns null when the datasource is missing or unknown.
async function fetchReleases(
  config: PkgReleaseConfig
): Promise<ReleaseResult | null> {
  const { datasource } = config;
  if (!datasource) {
    logger.warn('No datasource found');
    // Fix: previously fell through and emitted a second, misleading
    // "Unknown datasource: undefined" warning before returning null.
    return null;
  }
  if (!datasources[datasource]) {
    logger.warn('Unknown datasource: ' + datasource);
    return null;
  }
  const dep = await datasources[datasource].getPkgReleases(config);
  addMetaData(dep, datasource, config.lookupName);
  return dep;
}
// Memoized wrapper around fetchReleases using the per-repo global cache.
function getRawReleases(config: PkgReleaseConfig): Promise<ReleaseResult> {
  // Key includes registryUrls so distinct registry configs don't collide.
  const cacheKey =
    cacheNamespace +
    config.datasource +
    config.lookupName +
    config.registryUrls;
  // The repoCache is initialized for each repo
  // By returning a Promise and reusing it, we should only fetch each package at most once
  if (!global.repoCache[cacheKey]) {
    global.repoCache[cacheKey] = fetchReleases(config);
  }
  return global.repoCache[cacheKey];
}
export async function getPkgReleases(config: PkgReleaseConfig) {
const res = await getRawReleases({
...config,
@ -81,36 +111,6 @@ export async function getPkgReleases(config: PkgReleaseConfig) {
return res;
}
function getRawReleases(config: PkgReleaseConfig): Promise<ReleaseResult> {
const cacheKey =
cacheNamespace +
config.datasource +
config.lookupName +
config.registryUrls;
// The repoCache is initialized for each repo
// By returning a Promise and reusing it, we should only fetch each package at most once
if (!global.repoCache[cacheKey]) {
global.repoCache[cacheKey] = fetchReleases(config);
}
return global.repoCache[cacheKey];
}
async function fetchReleases(
config: PkgReleaseConfig
): Promise<ReleaseResult | null> {
const { datasource } = config;
if (!datasource) {
logger.warn('No datasource found');
}
if (!datasources[datasource]) {
logger.warn('Unknown datasource: ' + datasource);
return null;
}
const dep = await datasources[datasource].getPkgReleases(config);
addMetaData(dep, datasource, config.lookupName);
return dep;
}
export function supportsDigests(config: DigestConfig) {
return !!datasources[config.datasource].getDigest;
}

Просмотреть файл

@ -8,6 +8,121 @@ import { containsPlaceholder } from '../../manager/maven/extract';
import { downloadHttpProtocol } from './util';
import { PkgReleaseConfig, ReleaseResult } from '../common';
// Read a file:// dependency URL from local disk; null when it doesn't exist.
async function downloadFileProtocol(pkgUrl: url.URL): Promise<string | null> {
  // Strip the scheme to obtain a local filesystem path.
  const localPath = pkgUrl.toString().replace('file://', '');
  const fileExists = await fs.exists(localPath);
  return fileExists ? fs.readFile(localPath, 'utf8') : null;
}
// Download and parse an XML file belonging to the dependency from repoUrl.
// Supports file:// and http(s):// protocols; s3:// and unknown schemes,
// download failures, and unparseable content all yield null.
async function downloadMavenXml(
  dependency: MavenDependency,
  repoUrl: string,
  dependencyFilePath: string
): Promise<XmlDocument | null> {
  let pkgUrl;
  try {
    pkgUrl = new url.URL(
      `${dependency.dependencyUrl}/${dependencyFilePath}`,
      repoUrl
    );
  } catch (err) {
    logger.debug(
      { err, dependency, repoUrl, dependencyFilePath },
      `Error constructing URL for ${dependency.display}`
    );
    return null;
  }
  let rawContent: string;
  // Choose the transport based on the resolved URL's scheme.
  switch (pkgUrl.protocol) {
    case 'file:':
      rawContent = await downloadFileProtocol(pkgUrl);
      break;
    case 'http:':
    case 'https:':
      rawContent = await downloadHttpProtocol(pkgUrl);
      break;
    case 's3:':
      logger.debug('Skipping s3 dependency');
      return null;
    default:
      logger.warn(
        `Invalid protocol ${pkgUrl.protocol} in repository ${repoUrl}`
      );
      return null;
  }
  if (!rawContent) {
    logger.debug(`${dependency.display} not found in repository ${repoUrl}`);
    return null;
  }
  try {
    return new XmlDocument(rawContent);
  } catch (e) {
    // Content was fetched but is not valid XML.
    logger.debug(`Can not parse ${pkgUrl.href} for ${dependency.display}`);
    return null;
  }
}
// Extract homepage and sourceUrl metadata from the version's POM file.
// Fields still containing unresolved ${...} placeholders are omitted.
// Returns an empty object when the POM cannot be fetched or parsed.
async function getDependencyInfo(
  dependency: MavenDependency,
  repoUrl: string,
  version: string
): Promise<Partial<ReleaseResult>> {
  const result: Partial<ReleaseResult> = {};
  const path = `${version}/${dependency.name}-${version}.pom`;
  const pomContent = await downloadMavenXml(dependency, repoUrl, path);
  if (!pomContent) return result;
  const homepage = pomContent.valueWithPath('url');
  if (homepage && !containsPlaceholder(homepage)) {
    result.homepage = homepage;
  }
  const sourceUrl = pomContent.valueWithPath('scm.url');
  if (sourceUrl && !containsPlaceholder(sourceUrl)) {
    // POM scm URLs carry an "scm:" prefix; strip it for a plain URL.
    result.sourceUrl = sourceUrl.replace(/^scm:/, '');
  }
  return result;
}
// Return the greatest version per maven compare(); null for an empty list.
function getLatestVersion(versions: string[]): string | null {
  if (!versions.length) return null;
  let latest = versions[0];
  for (const candidate of versions.slice(1)) {
    if (compare(candidate, latest) === 1) {
      latest = candidate;
    }
  }
  return latest;
}
// Identifies a maven artifact plus the repository-relative path to fetch it.
interface MavenDependency {
  display: string; // original "group:name" lookup string, used in log output
  group?: string; // maven groupId
  name?: string; // maven artifactId
  dependencyUrl: string; // group with dots replaced by slashes, "/", name
}
// Split a "group:name" lookup string into MavenDependency coordinates.
function getDependencyParts(lookupName: string): MavenDependency {
  const [group, name] = lookupName.split(':');
  // Repository layout uses the groupId with dots converted to slashes.
  const groupPath = group.replace(/\./g, '/');
  return {
    display: lookupName,
    group,
    name,
    dependencyUrl: `${groupPath}/${name}`,
  };
}
// Pull the release list out of maven-metadata.xml (<versioning><versions>).
function extractVersions(metadata: XmlDocument): string[] {
  const versionsNode = metadata.descendantWithPath('versioning.versions');
  if (!versionsNode) return [];
  const versionElements = versionsNode.childrenNamed('version');
  if (!versionElements) return [];
  return versionElements.map(el => el.val);
}
export async function getPkgReleases({
lookupName,
registryUrls,
@ -67,118 +182,3 @@ export async function getPkgReleases({
releases: versions.map(v => ({ version: v })),
};
}
function getDependencyParts(lookupName: string): MavenDependency {
const [group, name] = lookupName.split(':');
const dependencyUrl = `${group.replace(/\./g, '/')}/${name}`;
return {
display: lookupName,
group,
name,
dependencyUrl,
};
}
interface MavenDependency {
display: string;
group?: string;
name?: string;
dependencyUrl: string;
}
async function downloadMavenXml(
dependency: MavenDependency,
repoUrl: string,
dependencyFilePath: string
): Promise<XmlDocument | null> {
let pkgUrl;
try {
pkgUrl = new url.URL(
`${dependency.dependencyUrl}/${dependencyFilePath}`,
repoUrl
);
} catch (err) {
logger.debug(
{ err, dependency, repoUrl, dependencyFilePath },
`Error constructing URL for ${dependency.display}`
);
return null;
}
let rawContent: string;
switch (pkgUrl.protocol) {
case 'file:':
rawContent = await downloadFileProtocol(pkgUrl);
break;
case 'http:':
case 'https:':
rawContent = await downloadHttpProtocol(pkgUrl);
break;
case 's3:':
logger.debug('Skipping s3 dependency');
return null;
default:
logger.warn(
`Invalid protocol ${pkgUrl.protocol} in repository ${repoUrl}`
);
return null;
}
if (!rawContent) {
logger.debug(`${dependency.display} not found in repository ${repoUrl}`);
return null;
}
try {
return new XmlDocument(rawContent);
} catch (e) {
logger.debug(`Can not parse ${pkgUrl.href} for ${dependency.display}`);
return null;
}
}
function extractVersions(metadata: XmlDocument): string[] {
const versions = metadata.descendantWithPath('versioning.versions');
const elements = versions && versions.childrenNamed('version');
if (!elements) return [];
return elements.map(el => el.val);
}
async function downloadFileProtocol(pkgUrl: url.URL): Promise<string | null> {
const pkgPath = pkgUrl.toString().replace('file://', '');
if (!(await fs.exists(pkgPath))) {
return null;
}
return fs.readFile(pkgPath, 'utf8');
}
function getLatestVersion(versions: string[]): string | null {
if (versions.length === 0) return null;
return versions.reduce((latestVersion, version) =>
compare(version, latestVersion) === 1 ? version : latestVersion
);
}
async function getDependencyInfo(
dependency: MavenDependency,
repoUrl: string,
version: string
): Promise<Partial<ReleaseResult>> {
const result: Partial<ReleaseResult> = {};
const path = `${version}/${dependency.name}-${version}.pom`;
const pomContent = await downloadMavenXml(dependency, repoUrl, path);
if (!pomContent) return result;
const homepage = pomContent.valueWithPath('url');
if (homepage && !containsPlaceholder(homepage)) {
result.homepage = homepage;
}
const sourceUrl = pomContent.valueWithPath('scm.url');
if (sourceUrl && !containsPlaceholder(sourceUrl)) {
result.sourceUrl = sourceUrl.replace(/^scm:/, '');
}
return result;
}

Просмотреть файл

@ -10,6 +10,23 @@ export function getNpmrc(): Record<string, any> | null {
return npmrc;
}
// Substitute ${VAR} references in a string with values from env.
// Throws 'env-replace' when a referenced variable is not set.
function envReplace(value: any, env = process.env): any {
  // istanbul ignore if
  if (!is.string(value)) {
    return value;
  }
  // Captures leading backslashes and the variable name inside ${...}.
  const ENV_EXPR = /(\\*)\$\{([^}]+)\}/g;
  const substitute = (match: string, esc: string, envVarName: string) => {
    const envValue = env[envVarName];
    if (envValue === undefined) {
      logger.warn('Failed to replace env in config: ' + match);
      throw new Error('env-replace');
    }
    return envValue;
  };
  return value.replace(ENV_EXPR, substitute);
}
export function setNpmrc(input?: string) {
if (input) {
if (input === npmrcRaw) {
@ -56,20 +73,3 @@ export function setNpmrc(input?: string) {
npmrcRaw = null;
}
}
function envReplace(value: any, env = process.env): any {
// istanbul ignore if
if (!is.string(value)) {
return value;
}
const ENV_EXPR = /(\\*)\$\{([^}]+)\}/g;
return value.replace(ENV_EXPR, (match, esc, envVarName) => {
if (env[envVarName] === undefined) {
logger.warn('Failed to replace env in config: ' + match);
throw new Error('env-replace');
}
return env[envVarName];
});
}

Просмотреть файл

@ -4,6 +4,20 @@ import * as v2 from './v2';
import * as v3 from './v3';
import { PkgReleaseConfig, ReleaseResult } from '../common';
// Detect the nuget feed protocol version from the registry URL.
function detectFeedVersion(url: string): 2 | 3 | null {
  try {
    const parsedUrl = urlApi.parse(url);
    // Official client does it in the same way: a ".json" path means v3.
    return parsedUrl.pathname.endsWith('.json') ? 3 : 2;
  } catch (e) {
    logger.debug({ e }, `nuget registry failure: can't parse ${url}`);
    return null;
  }
}
export async function getPkgReleases({
lookupName,
registryUrls,
@ -32,17 +46,3 @@ export async function getPkgReleases({
}
return dep;
}
function detectFeedVersion(url: string): 2 | 3 | null {
try {
const parsecUrl = urlApi.parse(url);
// Official client does it in the same way
if (parsecUrl.pathname.endsWith('.json')) {
return 3;
}
return 2;
} catch (e) {
logger.debug({ e }, `nuget registry failure: can't parse ${url}`);
return null;
}
}

Просмотреть файл

@ -4,6 +4,10 @@ import { logger } from '../../logger';
import got from '../../util/got';
import { ReleaseResult } from '../common';
// Read a value nested under <m:properties><d:propName> in the OData entry.
function getPkgProp(pkgInfo: XmlElement, propName: string) {
  const properties = pkgInfo.childNamed('m:properties');
  return properties.childNamed(`d:${propName}`).val;
}
export async function getPkgReleases(
feedUrl: string,
pkgName: string
@ -73,7 +77,3 @@ export async function getPkgReleases(
return null;
}
}
function getPkgProp(pkgInfo: XmlElement, propName: string) {
return pkgInfo.childNamed('m:properties').childNamed(`d:${propName}`).val;
}

Просмотреть файл

@ -28,33 +28,6 @@ function compatibleVersions(
);
}
// Resolve a pypi package's releases, trying each configured host in order
// and returning the first non-null result.
export async function getPkgReleases({
  compatibility,
  lookupName,
  registryUrls,
}: PkgReleaseConfig): Promise<ReleaseResult | null> {
  let hostUrls = ['https://pypi.org/pypi/'];
  if (is.nonEmptyArray(registryUrls)) {
    hostUrls = registryUrls;
  }
  // PIP_INDEX_URL (pip's own env var) overrides any configured registries.
  if (process.env.PIP_INDEX_URL) {
    hostUrls = [process.env.PIP_INDEX_URL];
  }
  for (let hostUrl of hostUrls) {
    // Normalize to a trailing slash before appending paths.
    hostUrl += hostUrl.endsWith('/') ? '' : '/';
    let dep: ReleaseResult;
    // "simple" endpoints serve the PEP 503 HTML index; others the JSON API.
    if (hostUrl.endsWith('/simple/') || hostUrl.endsWith('/+simple/')) {
      dep = await getSimpleDependency(lookupName, hostUrl);
    } else {
      dep = await getDependency(lookupName, hostUrl, compatibility);
    }
    if (dep !== null) {
      return dep;
    }
  }
  return null;
}
async function getDependency(
depName: string,
hostUrl: string,
@ -108,6 +81,18 @@ async function getDependency(
}
}
// Parse "<depName>-<version>.tar.gz" link text into a version string.
// Returns null when the text doesn't match that shape.
function extractVersionFromLinkText(
  text: string,
  depName: string
): string | null {
  const prefix = `${depName}-`;
  const suffix = '.tar.gz';
  if (!text.startsWith(prefix) || !text.endsWith(suffix)) {
    return null;
  }
  return text.slice(prefix.length, -suffix.length);
}
async function getSimpleDependency(
depName: string,
hostUrl: string
@ -147,14 +132,29 @@ async function getSimpleDependency(
}
}
function extractVersionFromLinkText(
text: string,
depName: string
): string | null {
const prefix = `${depName}-`;
const suffix = '.tar.gz';
if (!(text.startsWith(prefix) && text.endsWith(suffix))) {
return null;
export async function getPkgReleases({
compatibility,
lookupName,
registryUrls,
}: PkgReleaseConfig): Promise<ReleaseResult | null> {
let hostUrls = ['https://pypi.org/pypi/'];
if (is.nonEmptyArray(registryUrls)) {
hostUrls = registryUrls;
}
return text.replace(prefix, '').replace(/\.tar\.gz$/, '');
if (process.env.PIP_INDEX_URL) {
hostUrls = [process.env.PIP_INDEX_URL];
}
for (let hostUrl of hostUrls) {
hostUrl += hostUrl.endsWith('/') ? '' : '/';
let dep: ReleaseResult;
if (hostUrl.endsWith('/simple/') || hostUrl.endsWith('/+simple/')) {
dep = await getSimpleDependency(lookupName, hostUrl);
} else {
dep = await getDependency(lookupName, hostUrl, compatibility);
}
if (dep !== null) {
return dep;
}
}
return null;
}

Просмотреть файл

@ -4,6 +4,87 @@ import { parseIndexDir, SBT_PLUGINS_REPO } from './util';
import { logger } from '../../logger';
import { PkgReleaseConfig, ReleaseResult } from '../common';
// List release versions for an artifact under a maven/ivy-style directory
// index. Scans subdirs named "<artifact>" or "<artifact>_<suffix>" (native
// and sjs variants excluded), preferring an exact scala-version match.
// Returns a sorted, deduped list, or null when nothing is found.
async function resolvePackageReleases(
  searchRoot: string,
  artifact: string,
  scalaVersion: string
): Promise<string[]> {
  const indexContent = await downloadHttpProtocol(searchRoot, 'sbt');
  if (indexContent) {
    const releases: string[] = [];
    const parseSubdirs = (content: string) =>
      parseIndexDir(content, x => {
        if (x === artifact) return true;
        if (x.indexOf(`${artifact}_native`) === 0) return false;
        if (x.indexOf(`${artifact}_sjs`) === 0) return false;
        return x.indexOf(`${artifact}_`) === 0;
      });
    const artifactSubdirs = parseSubdirs(indexContent);
    let searchSubdirs = artifactSubdirs;
    if (
      scalaVersion &&
      artifactSubdirs.indexOf(`${artifact}_${scalaVersion}`) !== -1
    ) {
      // An exact scala-version subdir exists - restrict the search to it.
      searchSubdirs = [`${artifact}_${scalaVersion}`];
    }
    // Filter out "." / ".." style entries from directory listings.
    const parseReleases = (content: string) =>
      parseIndexDir(content, x => !/^\.+$/.test(x));
    for (const searchSubdir of searchSubdirs) {
      const content = await downloadHttpProtocol(
        `${searchRoot}/${searchSubdir}`,
        'sbt'
      );
      if (content) {
        const subdirReleases = parseReleases(content);
        subdirReleases.forEach(x => releases.push(x));
      }
    }
    if (releases.length) return [...new Set(releases)].sort(compare);
  }
  return null;
}
// List versions for an sbt plugin laid out as
// <root>/<artifact>/scala_<v>/sbt_<v>/<version>/.
// Falls back to the plain package layout when nothing is found here.
async function resolvePluginReleases(
  rootUrl: string,
  artifact: string,
  scalaVersion: string
) {
  const searchRoot = `${rootUrl}/${artifact}`;
  // Filter out "." / ".." style entries from directory listings.
  const parse = (content: string) =>
    parseIndexDir(content, x => !/^\.+$/.test(x));
  const indexContent = await downloadHttpProtocol(searchRoot, 'sbt');
  if (indexContent) {
    const releases: string[] = [];
    const scalaVersionItems = parse(indexContent);
    const scalaVersions = scalaVersionItems.map(x => x.replace(/^scala_/, ''));
    // Prefer the requested scala version when listed; otherwise scan all.
    const searchVersions =
      scalaVersions.indexOf(scalaVersion) === -1
        ? scalaVersions
        : [scalaVersion];
    for (const searchVersion of searchVersions) {
      const searchSubRoot = `${searchRoot}/scala_${searchVersion}`;
      const subRootContent = await downloadHttpProtocol(searchSubRoot, 'sbt');
      if (subRootContent) {
        const sbtVersionItems = parse(subRootContent);
        for (const sbtItem of sbtVersionItems) {
          const releasesRoot = `${searchSubRoot}/${sbtItem}`;
          const releasesIndexContent = await downloadHttpProtocol(
            releasesRoot,
            'sbt'
          );
          if (releasesIndexContent) {
            const releasesParsed = parse(releasesIndexContent);
            releasesParsed.forEach(x => releases.push(x));
          }
        }
      }
    }
    // Dedupe and sort what was found across all scala/sbt version subdirs.
    if (releases.length) return [...new Set(releases)].sort(compare);
  }
  return resolvePackageReleases(rootUrl, artifact, scalaVersion);
}
export async function getPkgReleases(
config: PkgReleaseConfig
): Promise<ReleaseResult | null> {
@ -58,84 +139,3 @@ export async function getPkgReleases(
);
return null;
}
async function resolvePluginReleases(
rootUrl: string,
artifact: string,
scalaVersion: string
) {
const searchRoot = `${rootUrl}/${artifact}`;
const parse = (content: string) =>
parseIndexDir(content, x => !/^\.+$/.test(x));
const indexContent = await downloadHttpProtocol(searchRoot, 'sbt');
if (indexContent) {
const releases: string[] = [];
const scalaVersionItems = parse(indexContent);
const scalaVersions = scalaVersionItems.map(x => x.replace(/^scala_/, ''));
const searchVersions =
scalaVersions.indexOf(scalaVersion) === -1
? scalaVersions
: [scalaVersion];
for (const searchVersion of searchVersions) {
const searchSubRoot = `${searchRoot}/scala_${searchVersion}`;
const subRootContent = await downloadHttpProtocol(searchSubRoot, 'sbt');
if (subRootContent) {
const sbtVersionItems = parse(subRootContent);
for (const sbtItem of sbtVersionItems) {
const releasesRoot = `${searchSubRoot}/${sbtItem}`;
const releasesIndexContent = await downloadHttpProtocol(
releasesRoot,
'sbt'
);
if (releasesIndexContent) {
const releasesParsed = parse(releasesIndexContent);
releasesParsed.forEach(x => releases.push(x));
}
}
}
}
if (releases.length) return [...new Set(releases)].sort(compare);
}
return resolvePackageReleases(rootUrl, artifact, scalaVersion);
}
async function resolvePackageReleases(
searchRoot: string,
artifact: string,
scalaVersion: string
): Promise<string[]> {
const indexContent = await downloadHttpProtocol(searchRoot, 'sbt');
if (indexContent) {
const releases: string[] = [];
const parseSubdirs = (content: string) =>
parseIndexDir(content, x => {
if (x === artifact) return true;
if (x.indexOf(`${artifact}_native`) === 0) return false;
if (x.indexOf(`${artifact}_sjs`) === 0) return false;
return x.indexOf(`${artifact}_`) === 0;
});
const artifactSubdirs = parseSubdirs(indexContent);
let searchSubdirs = artifactSubdirs;
if (
scalaVersion &&
artifactSubdirs.indexOf(`${artifact}_${scalaVersion}`) !== -1
) {
searchSubdirs = [`${artifact}_${scalaVersion}`];
}
const parseReleases = (content: string) =>
parseIndexDir(content, x => !/^\.+$/.test(x));
for (const searchSubdir of searchSubdirs) {
const content = await downloadHttpProtocol(
`${searchRoot}/${searchSubdir}`,
'sbt'
);
if (content) {
const subdirReleases = parseReleases(content);
subdirReleases.forEach(x => releases.push(x));
}
}
if (releases.length) return [...new Set(releases)].sort(compare);
}
return null;
}

Просмотреть файл

@ -71,33 +71,18 @@ export async function getAzureBranchObj(
};
}
export async function getChanges(
files: { name: string; contents: any }[],
repoId: string,
branchName: string
) {
const changes = [];
for (const file of files) {
// Add or update
let changeType = 1;
const fileAlreadyThere = await getFile(repoId, file.name, branchName);
if (fileAlreadyThere) {
changeType = 2;
}
changes.push({
changeType,
item: {
path: file.name,
},
newContent: {
Content: file.contents,
ContentType: 0, // RawText
},
async function streamToString(stream: NodeJS.ReadableStream) {
const chunks: string[] = [];
/* eslint-disable promise/avoid-new */
const p = await new Promise<string>(resolve => {
stream.on('data', (chunk: any) => {
chunks.push(chunk.toString());
});
}
return changes;
stream.on('end', () => {
resolve(chunks.join(''));
});
});
return p;
}
// if no branchName, look globaly
@ -144,18 +129,33 @@ export async function getFile(
return null; // no file found
}
async function streamToString(stream: NodeJS.ReadableStream) {
const chunks: string[] = [];
/* eslint-disable promise/avoid-new */
const p = await new Promise<string>(resolve => {
stream.on('data', (chunk: any) => {
chunks.push(chunk.toString());
export async function getChanges(
files: { name: string; contents: any }[],
repoId: string,
branchName: string
) {
const changes = [];
for (const file of files) {
// Add or update
let changeType = 1;
const fileAlreadyThere = await getFile(repoId, file.name, branchName);
if (fileAlreadyThere) {
changeType = 2;
}
changes.push({
changeType,
item: {
path: file.name,
},
newContent: {
Content: file.contents,
ContentType: 0, // RawText
},
});
stream.on('end', () => {
resolve(chunks.join(''));
});
});
return p;
}
return changes;
}
export function max4000Chars(str: string) {

Просмотреть файл

@ -63,6 +63,15 @@ export async function getRepos() {
return repos.map(repo => `${repo.project!.name}/${repo.name}`);
}
// Look up the commit SHA at the tip of the given fully-qualified branch ref.
async function getBranchCommit(fullBranchName: string) {
  const gitApi = await azureApi.gitApi();
  const shortName = azureHelper.getBranchNameWithoutRefsheadsPrefix(
    fullBranchName
  )!;
  const branch = await gitApi.getBranch(config.repoId, shortName);
  return branch.commit!.commitId;
}
export async function initRepo({
repository,
localDir,
@ -136,6 +145,12 @@ export function getRepoForceRebase() {
return false;
}
// Search
export /* istanbul ignore next */ function getFileList(branchName: string) {
  // Delegate to the storage layer for the branch's full file listing.
  const { storage } = config;
  return storage.getFileList(branchName);
}
export /* istanbul ignore next */ async function setBaseBranch(
branchName = config.baseBranch
) {
@ -153,12 +168,6 @@ export /* istanbul ignore next */ function setBranchPrefix(
return config.storage.setBranchPrefix(branchPrefix);
}
// Search
export /* istanbul ignore next */ function getFileList(branchName: string) {
return config.storage.getFileList(branchName);
}
// Branch
export /* istanbul ignore next */ function branchExists(branchName: string) {
@ -182,60 +191,40 @@ export /* istanbul ignore next */ function getFile(
return config.storage.getFile(filePath, branchName);
}
export /* istanbul ignore next */ async function deleteBranch(
  branchName: string,
  abandonAssociatedPr = false
) {
  // Remove the branch first, then optionally abandon its associated PR.
  await config.storage.deleteBranch(branchName);
  if (!abandonAssociatedPr) {
    return;
  }
  const associatedPr = await getBranchPr(branchName);
  await abandonPr(associatedPr.number);
}
export /* istanbul ignore next */ function getBranchLastCommitTime(
  branchName: string
) {
  // Timestamp of the most recent commit on the branch, via storage.
  const { storage } = config;
  return storage.getBranchLastCommitTime(branchName);
}
export /* istanbul ignore next */ function getRepoStatus() {
  // Working-tree status as reported by the storage layer.
  const { storage } = config;
  return storage.getRepoStatus();
}
export /* istanbul ignore next */ function mergeBranch(branchName: string) {
  // Merge the branch into the base branch via the storage layer.
  const { storage } = config;
  return storage.mergeBranch(branchName);
}
export /* istanbul ignore next */ function commitFilesToBranch(
  branchName: string,
  files: any[],
  message: string,
  parentBranch = config.baseBranch
) {
  // Commit `files` with `message` to `branchName` on top of `parentBranch`.
  const { storage } = config;
  return storage.commitFilesToBranch(branchName, files, message, parentBranch);
}
export /* istanbul ignore next */ function getCommitMessages() {
  // Recent commit messages, via the storage layer.
  const { storage } = config;
  return storage.getCommitMessages();
}
async function getBranchCommit(fullBranchName: string) {
// istanbul ignore next
async function abandonPr(prNo: number) {
logger.debug(`abandonPr(prNo)(${prNo})`);
const azureApiGit = await azureApi.gitApi();
const commit = await azureApiGit.getBranch(
await azureApiGit.updatePullRequest(
{
status: 2,
},
config.repoId,
azureHelper.getBranchNameWithoutRefsheadsPrefix(fullBranchName)!
prNo
);
return commit.commit!.commitId;
}
export function getPrList() {
return [];
// Fetch a single PR by id and map it into Renovate's PR format.
// Returns null when the id is falsy or no matching PR exists.
export async function getPr(pullRequestId: number) {
  logger.debug(`getPr(${pullRequestId})`);
  if (!pullRequestId) {
    return null;
  }
  const azureApiGit = await azureApi.gitApi();
  // status: 4 — presumably the "all states" PullRequestStatus filter;
  // TODO confirm against the Azure DevOps enum.
  const prs = await azureApiGit.getPullRequests(config.repoId, { status: 4 });
  const azurePr: any = prs.find(item => item.pullRequestId === pullRequestId);
  if (!azurePr) {
    return null;
  }
  // Attach only the currently-active labels, by name.
  const labels = await azureApiGit.getPullRequestLabels(
    config.repoId,
    pullRequestId
  );
  azurePr.labels = labels
    .filter(label => label.active)
    .map(label => label.name);
  logger.debug(`pr: (${azurePr})`);
  const pr = azureHelper.getRenovatePRFormat(azurePr);
  return pr;
}
export async function findPr(
@ -288,22 +277,51 @@ export async function getBranchPr(branchName: string) {
return existingPr ? getPr(existingPr.pullRequestId) : null;
}
export async function getBranchStatus(
export /* istanbul ignore next */ async function deleteBranch(
branchName: string,
requiredStatusChecks: any
abandonAssociatedPr = false
) {
logger.debug(`getBranchStatus(${branchName})`);
if (!requiredStatusChecks) {
// null means disable status checks, so it always succeeds
return 'success';
await config.storage.deleteBranch(branchName);
if (abandonAssociatedPr) {
const pr = await getBranchPr(branchName);
await abandonPr(pr.number);
}
if (requiredStatusChecks.length) {
// This is Unsupported
logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`);
return 'failed';
}
const branchStatusCheck = await getBranchStatusCheck(branchName);
return branchStatusCheck;
}
export /* istanbul ignore next */ function getBranchLastCommitTime(
branchName: string
) {
return config.storage.getBranchLastCommitTime(branchName);
}
export /* istanbul ignore next */ function getRepoStatus() {
return config.storage.getRepoStatus();
}
export /* istanbul ignore next */ function mergeBranch(branchName: string) {
return config.storage.mergeBranch(branchName);
}
export /* istanbul ignore next */ function commitFilesToBranch(
branchName: string,
files: any[],
message: string,
parentBranch = config.baseBranch
) {
return config.storage.commitFilesToBranch(
branchName,
files,
message,
parentBranch
);
}
export /* istanbul ignore next */ function getCommitMessages() {
return config.storage.getCommitMessages();
}
// This adapter keeps no PR-list cache; always report an empty list.
export function getPrList() {
  const prList: any[] = [];
  return prList;
}
export async function getBranchStatusCheck(
@ -322,27 +340,22 @@ export async function getBranchStatusCheck(
return 'pending';
}
export async function getPr(pullRequestId: number) {
logger.debug(`getPr(${pullRequestId})`);
if (!pullRequestId) {
return null;
export async function getBranchStatus(
branchName: string,
requiredStatusChecks: any
) {
logger.debug(`getBranchStatus(${branchName})`);
if (!requiredStatusChecks) {
// null means disable status checks, so it always succeeds
return 'success';
}
const azureApiGit = await azureApi.gitApi();
const prs = await azureApiGit.getPullRequests(config.repoId, { status: 4 });
const azurePr: any = prs.find(item => item.pullRequestId === pullRequestId);
if (!azurePr) {
return null;
if (requiredStatusChecks.length) {
// This is Unsupported
logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`);
return 'failed';
}
const labels = await azureApiGit.getPullRequestLabels(
config.repoId,
pullRequestId
);
azurePr.labels = labels
.filter(label => label.active)
.map(label => label.name);
logger.debug(`pr: (${azurePr})`);
const pr = azureHelper.getRenovatePRFormat(azurePr);
return pr;
const branchStatusCheck = await getBranchStatusCheck(branchName);
return branchStatusCheck;
}
export async function createPr(
@ -458,19 +471,6 @@ export async function ensureCommentRemoval(issueNo: number, topic: string) {
}
}
// istanbul ignore next
async function abandonPr(prNo: number) {
logger.debug(`abandonPr(prNo)(${prNo})`);
const azureApiGit = await azureApi.gitApi();
await azureApiGit.updatePullRequest(
{
status: 2,
},
config.repoId,
prNo
);
}
export function setBranchStatus(
branchName: string,
context: string,

Просмотреть файл

@ -244,6 +244,139 @@ export function branchExists(branchName: string) {
return config.storage.branchExists(branchName);
}
export function isBranchStale(branchName: string) {
  // True when the branch is behind its base, per the storage layer.
  logger.debug(`isBranchStale(${branchName})`);
  const { storage } = config;
  return storage.isBranchStale(branchName);
}
// Gets details for a PR
// Fetch full details for a PR, including merge/conflict state and whether
// it has been modified by someone other than the configured gitAuthor.
export async function getPr(prNo: number, refreshCache?: boolean) {
  logger.debug(`getPr(${prNo})`);
  if (!prNo) {
    return null;
  }
  const res = await api.get(
    `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}`,
    { useCache: !refreshCache }
  );
  const pr: any = {
    displayNumber: `Pull Request #${res.body.id}`,
    ...utils.prInfo(res.body),
    reviewers: res.body.reviewers.map(
      (r: { user: { name: any } }) => r.user.name
    ),
    isModified: false,
  };
  // Keep the locally-tracked PR version in sync for subsequent updates.
  pr.version = updatePrVersion(pr.number, pr.version);
  if (pr.state === 'open') {
    // The merge preview endpoint reports conflict state and mergeability.
    const mergeRes = await api.get(
      `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/merge`,
      { useCache: !refreshCache }
    );
    pr.isConflicted = !!mergeRes.body.conflicted;
    pr.canMerge = !!mergeRes.body.canMerge;
    const prCommits = (await api.get(
      `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/commits?withCounts=true`,
      { useCache: !refreshCache }
    )).body;
    // A PR is "modified" when it has more than one commit, or its single
    // commit was not authored by the configured gitAuthor.
    if (prCommits.totalCount === 1) {
      if (global.gitAuthor) {
        const commitAuthorEmail = prCommits.values[0].author.emailAddress;
        if (commitAuthorEmail !== global.gitAuthor.email) {
          logger.debug(
            { prNo },
            'PR is modified: 1 commit but not by configured gitAuthor'
          );
          pr.isModified = true;
        }
      }
    } else {
      logger.debug(
        { prNo },
        `PR is modified: Found ${prCommits.totalCount} commits`
      );
      pr.isModified = true;
    }
  }
  if (await branchExists(pr.branchName)) {
    pr.isStale = await isBranchStale(pr.branchName);
  }
  return pr;
}
// TODO: coverage
// istanbul ignore next
function matchesState(state: string, desiredState: string) {
  // 'all' matches any state; a '!x' prefix matches any state except x.
  if (desiredState === 'all') {
    return true;
  }
  return desiredState.startsWith('!')
    ? state !== desiredState.slice(1)
    : state === desiredState;
}
// TODO: coverage
// istanbul ignore next
// Builds a predicate matching PRs on branch, (optional) title, and state.
const isRelevantPr = (
  branchName: string,
  prTitle: string | null | undefined,
  state: string
) => (p: { branchName: string; title: string; state: string }) => {
  if (p.branchName !== branchName) {
    return false;
  }
  if (prTitle && p.title !== prTitle) {
    return false;
  }
  return matchesState(p.state, state);
};
// TODO: coverage
// eslint-disable-next-line @typescript-eslint/no-unused-vars
export async function getPrList(_args?: any) {
  logger.debug(`getPrList()`);
  // istanbul ignore next
  if (config.prList) {
    logger.debug('returning cached PR list');
    return config.prList;
  }
  // Fetch every PR authored by the configured user, in any state.
  const searchParams = new URLSearchParams({
    state: 'ALL',
    'role.1': 'AUTHOR',
    'username.1': config.username,
  }).toString();
  const values = await utils.accumulateValues(
    `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests?${searchParams}`
  );
  config.prList = values.map(utils.prInfo);
  logger.info({ length: config.prList.length }, 'Retrieved Pull Requests');
  return config.prList;
}
// TODO: coverage
// istanbul ignore next
export async function findPr(
branchName: string,
prTitle?: string,
state = 'all',
refreshCache?: boolean
) {
logger.debug(`findPr(${branchName}, "${prTitle}", "${state}")`);
const prList = await getPrList({ refreshCache });
const pr = prList.find(isRelevantPr(branchName, prTitle, state));
if (pr) {
logger.debug(`Found PR #${pr.number}`);
} else {
logger.debug(`DID NOT Found PR from branch #${branchName}`);
}
return pr;
}
// Returns the Pull Request for a branch. Null if not exists.
export async function getBranchPr(branchName: string, refreshCache?: boolean) {
logger.debug(`getBranchPr(${branchName})`);
@ -256,11 +389,6 @@ export function getAllRenovateBranches(branchPrefix: string) {
return config.storage.getAllRenovateBranches(branchPrefix);
}
export function isBranchStale(branchName: string) {
logger.debug(`isBranchStale(${branchName})`);
return config.storage.isBranchStale(branchName);
}
export async function commitFilesToBranch(
branchName: string,
files: any[],
@ -656,71 +784,6 @@ export async function ensureCommentRemoval(prNo: number, topic: string) {
}
}
// TODO: coverage
// eslint-disable-next-line @typescript-eslint/no-unused-vars
export async function getPrList(_args?: any) {
logger.debug(`getPrList()`);
// istanbul ignore next
if (!config.prList) {
const query = new URLSearchParams({
state: 'ALL',
'role.1': 'AUTHOR',
'username.1': config.username,
}).toString();
const values = await utils.accumulateValues(
`./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests?${query}`
);
config.prList = values.map(utils.prInfo);
logger.info({ length: config.prList.length }, 'Retrieved Pull Requests');
} else {
logger.debug('returning cached PR list');
}
return config.prList;
}
// TODO: coverage
// istanbul ignore next
function matchesState(state: string, desiredState: string) {
if (desiredState === 'all') {
return true;
}
if (desiredState[0] === '!') {
return state !== desiredState.substring(1);
}
return state === desiredState;
}
// TODO: coverage
// istanbul ignore next
const isRelevantPr = (
branchName: string,
prTitle: string | null | undefined,
state: string
) => (p: { branchName: string; title: string; state: string }) =>
p.branchName === branchName &&
(!prTitle || p.title === prTitle) &&
matchesState(p.state, state);
// TODO: coverage
// istanbul ignore next
export async function findPr(
branchName: string,
prTitle?: string,
state = 'all',
refreshCache?: boolean
) {
logger.debug(`findPr(${branchName}, "${prTitle}", "${state}")`);
const prList = await getPrList({ refreshCache });
const pr = prList.find(isRelevantPr(branchName, prTitle, state));
if (pr) {
logger.debug(`Found PR #${pr.number}`);
} else {
logger.debug(`DID NOT Found PR from branch #${branchName}`);
}
return pr;
}
// Pull Request
const escapeHash = input => (input ? input.replace(/#/g, '%23') : input);
@ -808,69 +871,6 @@ export async function createPr(
return pr;
}
// Gets details for a PR
export async function getPr(prNo: number, refreshCache?: boolean) {
logger.debug(`getPr(${prNo})`);
if (!prNo) {
return null;
}
const res = await api.get(
`./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}`,
{ useCache: !refreshCache }
);
const pr: any = {
displayNumber: `Pull Request #${res.body.id}`,
...utils.prInfo(res.body),
reviewers: res.body.reviewers.map(
(r: { user: { name: any } }) => r.user.name
),
isModified: false,
};
pr.version = updatePrVersion(pr.number, pr.version);
if (pr.state === 'open') {
const mergeRes = await api.get(
`./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/merge`,
{ useCache: !refreshCache }
);
pr.isConflicted = !!mergeRes.body.conflicted;
pr.canMerge = !!mergeRes.body.canMerge;
const prCommits = (await api.get(
`./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/commits?withCounts=true`,
{ useCache: !refreshCache }
)).body;
if (prCommits.totalCount === 1) {
if (global.gitAuthor) {
const commitAuthorEmail = prCommits.values[0].author.emailAddress;
if (commitAuthorEmail !== global.gitAuthor.email) {
logger.debug(
{ prNo },
'PR is modified: 1 commit but not by configured gitAuthor'
);
pr.isModified = true;
}
}
} else {
logger.debug(
{ prNo },
`PR is modified: Found ${prCommits.totalCount} commits`
);
pr.isModified = true;
}
}
if (await branchExists(pr.branchName)) {
pr.isStale = await isBranchStale(pr.branchName);
}
return pr;
}
// Return a list of all modified files in a PR
// https://docs.atlassian.com/bitbucket-server/rest/6.0.0/bitbucket-rest.html
export async function getPrFiles(prNo: number) {

Просмотреть файл

@ -138,6 +138,13 @@ export function getRepoForceRebase() {
return false;
}
// Search
// Get full file list
export function getFileList(branchName?: string) {
  // Full file listing for the branch, via the storage layer.
  const { storage } = config;
  return storage.getFileList(branchName);
}
export async function setBaseBranch(branchName = config.baseBranch) {
logger.debug(`Setting baseBranch to ${branchName}`);
config.baseBranch = branchName;
@ -153,13 +160,6 @@ export /* istanbul ignore next */ function setBranchPrefix(
return config.storage.setBranchPrefix(branchPrefix);
}
// Search
// Get full file list
export function getFileList(branchName?: string) {
return config.storage.getFileList(branchName);
}
// Branch
// Returns true if branch exists, otherwise false
@ -179,6 +179,49 @@ export function getFile(filePath: string, branchName?: string) {
return config.storage.getFile(filePath, branchName);
}
// istanbul ignore next
function matchesState(state: string, desiredState: string) {
  // 'all' matches everything; a leading '!' negates the comparison.
  if (desiredState === 'all') {
    return true;
  }
  const negated = desiredState.startsWith('!');
  return negated
    ? state !== desiredState.slice(1)
    : state === desiredState;
}
// Fetch (and cache) the repository's PRs across all states.
export async function getPrList() {
  logger.debug('getPrList()');
  if (config.prList) {
    return config.prList;
  }
  logger.debug('Retrieving PR list');
  const states = utils.prStates.all.map(state => 'state=' + state).join('&');
  const url = `/2.0/repositories/${config.repository}/pullrequests?` + states;
  const prs = await utils.accumulateValues(url, undefined, undefined, 50);
  config.prList = prs.map(utils.prInfo);
  logger.info({ length: config.prList.length }, 'Retrieved Pull Requests');
  return config.prList;
}
// Find a PR matching branch name, optional title, and state filter.
export async function findPr(
  branchName: string,
  prTitle?: string | null,
  state = 'all'
) {
  logger.debug(`findPr(${branchName}, ${prTitle}, ${state})`);
  const matches = (p: { branchName: string; title: string; state: string }) =>
    p.branchName === branchName &&
    (!prTitle || p.title === prTitle) &&
    matchesState(p.state, state);
  const prList = await getPrList();
  const pr = prList.find(matches);
  if (pr) {
    logger.debug(`Found PR #${pr.number}`);
  }
  return pr;
}
export async function deleteBranch(branchName: string, closePr?: boolean) {
if (closePr) {
const pr = await findPr(branchName, null, 'open');
@ -222,6 +265,88 @@ export function getCommitMessages() {
return config.storage.getCommitMessages();
}
// Inspect the PR's raw diff (not JSON) for merge-conflict markers.
async function isPrConflicted(prNo: number) {
  const res = await api.get(
    `/2.0/repositories/${config.repository}/pullrequests/${prNo}/diff`,
    { json: false } as any
  );
  return utils.isConflicted(parseDiff(res.body));
}
// Gets details for a PR
// Fetch full PR details: conflict state, mergeability, staleness, and
// whether anyone other than the configured gitAuthor has pushed to it.
export async function getPr(prNo: number) {
  const pr = (await api.get(
    `/2.0/repositories/${config.repository}/pullrequests/${prNo}`
  )).body;
  // istanbul ignore if
  if (!pr) {
    return null;
  }
  const res: any = {
    displayNumber: `Pull Request #${pr.id}`,
    ...utils.prInfo(pr),
    isModified: false,
  };
  if (utils.prStates.open.includes(pr.state)) {
    res.isConflicted = await isPrConflicted(prNo);
    // TODO: Is that correct? Should we check getBranchStatus like gitlab?
    res.canMerge = !res.isConflicted;
    // we only want the first two commits, because size tells us the overall number
    const url = pr.links.commits.href + '?pagelen=2';
    const { body } = await api.get<utils.PagedResult<Commit>>(url);
    const size = body.size || body.values.length;
    // istanbul ignore if
    if (size === undefined) {
      logger.warn({ prNo, url, body }, 'invalid response so can rebase');
    } else if (size === 1) {
      // Single commit: modified only when its author differs from gitAuthor.
      if (global.gitAuthor) {
        const author = addrs.parseOneAddress(
          body.values[0].author.raw
        ) as addrs.ParsedMailbox;
        if (author.address !== global.gitAuthor.email) {
          logger.debug(
            { prNo },
            'PR is modified: 1 commit but not by configured gitAuthor'
          );
          res.isModified = true;
        }
      }
    } else {
      // More than one commit always counts as modified.
      logger.debug({ prNo }, `PR is modified: Found ${size} commits`);
      res.isModified = true;
    }
  }
  if (await branchExists(pr.source.branch.name)) {
    res.isStale = await isBranchStale(pr.source.branch.name);
  }
  return res;
}
// Percent-encode '#' so branch names survive URL path interpolation.
const escapeHash = (input) => {
  if (!input) {
    return input;
  }
  return input.split('#').join('%23');
};
// Return the commit SHA for a branch
// Look up the tip commit SHA of a branch; null when the lookup fails.
async function getBranchCommit(branchName: string) {
  try {
    const res = await api.get(
      `/2.0/repositories/${config.repository}/refs/branches/${escapeHash(
        branchName
      )}`
    );
    return res.body.target.hash;
  } catch (err) /* istanbul ignore next */ {
    logger.debug({ err }, `getBranchCommit('${branchName}') failed'`);
    return null;
  }
}
// Returns the Pull Request for a branch. Null if not exists.
export async function getBranchPr(branchName: string) {
logger.debug(`getBranchPr(${branchName})`);
@ -365,6 +490,15 @@ async function closeIssue(issueNumber: number) {
);
}
export function getPrBody(input: string) {
  // Strip HTML and Renovate-specific markup that Bitbucket can't render,
  // after truncating to Bitbucket's description size limit.
  const truncated = smartTruncate(input, 50000);
  return truncated
    .replace(/<\/?summary>/g, '**')
    .replace(/<\/?details>/g, '')
    .replace(new RegExp(`\n---\n\n.*?<!-- ${appSlug}-rebase -->.*?\n`), '')
    .replace(/\]\(\.\.\/pull\//g, '](../../pull-requests/');
}
export async function ensureIssue(title: string, body: string) {
logger.debug(`ensureIssue()`);
const description = getPrBody(sanitize(body));
@ -497,36 +631,6 @@ export function ensureCommentRemoval(prNo: number, topic: string) {
return comments.ensureCommentRemoval(config, prNo, topic);
}
// istanbul ignore next
function matchesState(state: string, desiredState: string) {
if (desiredState === 'all') {
return true;
}
if (desiredState[0] === '!') {
return state !== desiredState.substring(1);
}
return state === desiredState;
}
export async function findPr(
branchName: string,
prTitle?: string | null,
state = 'all'
) {
logger.debug(`findPr(${branchName}, ${prTitle}, ${state})`);
const prList = await getPrList();
const pr = prList.find(
(p: { branchName: string; title: string; state: string }) =>
p.branchName === branchName &&
(!prTitle || p.title === prTitle) &&
matchesState(p.state, state)
);
if (pr) {
logger.debug(`Found PR #${pr.number}`);
}
return pr;
}
// Creates PR and returns PR number
export async function createPr(
branchName: string,
@ -587,15 +691,6 @@ export async function createPr(
return pr;
}
async function isPrConflicted(prNo: number) {
const diff = (await api.get(
`/2.0/repositories/${config.repository}/pullrequests/${prNo}/diff`,
{ json: false } as any
)).body;
return utils.isConflicted(parseDiff(diff));
}
interface Reviewer {
uuid: { raw: string };
}
@ -603,61 +698,6 @@ interface Reviewer {
interface Commit {
author: { raw: string };
}
// Gets details for a PR
export async function getPr(prNo: number) {
const pr = (await api.get(
`/2.0/repositories/${config.repository}/pullrequests/${prNo}`
)).body;
// istanbul ignore if
if (!pr) {
return null;
}
const res: any = {
displayNumber: `Pull Request #${pr.id}`,
...utils.prInfo(pr),
isModified: false,
};
if (utils.prStates.open.includes(pr.state)) {
res.isConflicted = await isPrConflicted(prNo);
// TODO: Is that correct? Should we check getBranchStatus like gitlab?
res.canMerge = !res.isConflicted;
// we only want the first two commits, because size tells us the overall number
const url = pr.links.commits.href + '?pagelen=2';
const { body } = await api.get<utils.PagedResult<Commit>>(url);
const size = body.size || body.values.length;
// istanbul ignore if
if (size === undefined) {
logger.warn({ prNo, url, body }, 'invalid response so can rebase');
} else if (size === 1) {
if (global.gitAuthor) {
const author = addrs.parseOneAddress(
body.values[0].author.raw
) as addrs.ParsedMailbox;
if (author.address !== global.gitAuthor.email) {
logger.debug(
{ prNo },
'PR is modified: 1 commit but not by configured gitAuthor'
);
res.isModified = true;
}
}
} else {
logger.debug({ prNo }, `PR is modified: Found ${size} commits`);
res.isModified = true;
}
}
if (await branchExists(pr.source.branch.name)) {
res.isStale = await isBranchStale(pr.source.branch.name);
}
return res;
}
// Return a list of all modified files in a PR
export async function getPrFiles(prNo: number) {
@ -703,47 +743,8 @@ export async function mergePr(prNo: number, branchName: string) {
return true;
}
export function getPrBody(input: string) {
// Remove any HTML we use
return smartTruncate(input, 50000)
.replace(/<\/?summary>/g, '**')
.replace(/<\/?details>/g, '')
.replace(new RegExp(`\n---\n\n.*?<!-- ${appSlug}-rebase -->.*?\n`), '')
.replace(/\]\(\.\.\/pull\//g, '](../../pull-requests/');
}
const escapeHash = input => (input ? input.replace(/#/g, '%23') : input);
// Return the commit SHA for a branch
async function getBranchCommit(branchName: string) {
try {
const branch = (await api.get(
`/2.0/repositories/${config.repository}/refs/branches/${escapeHash(
branchName
)}`
)).body;
return branch.target.hash;
} catch (err) /* istanbul ignore next */ {
logger.debug({ err }, `getBranchCommit('${branchName}') failed'`);
return null;
}
}
// Pull Request
export async function getPrList() {
logger.debug('getPrList()');
if (!config.prList) {
logger.debug('Retrieving PR list');
let url = `/2.0/repositories/${config.repository}/pullrequests?`;
url += utils.prStates.all.map(state => 'state=' + state).join('&');
const prs = await utils.accumulateValues(url, undefined, undefined, 50);
config.prList = prs.map(utils.prInfo);
logger.info({ length: config.prList.length }, 'Retrieved Pull Requests');
}
return config.prList;
}
export function cleanRepo() {
// istanbul ignore if
if (config.storage && config.storage.cleanRepo) {

Просмотреть файл

@ -25,6 +25,40 @@ interface LocalConfig extends StorageConfig {
branchPrefix: string;
}
// istanbul ignore next
function checkForPlatformFailure(err: Error) {
  // Tests raise git errors directly, so skip translation under NODE_ENV=test.
  if (process.env.NODE_ENV === 'test') {
    return;
  }
  // Messages that indicate the hosting platform (not the repo) is failing.
  const platformFailureStrings = [
    'remote: Invalid username or password',
    'gnutls_handshake() failed',
    'The requested URL returned error: 5',
    'The remote end hung up unexpectedly',
    'access denied or repository not exported',
    'Could not write new index file',
    'Failed to connect to',
    'Connection timed out',
  ];
  const isPlatformFailure = platformFailureStrings.some(needle =>
    err.message.includes(needle)
  );
  if (isPlatformFailure) {
    throw new Error('platform-failure');
  }
}
// Strip a single leading 'origin/' remote prefix, if present.
function localName(branchName: string) {
  const prefix = 'origin/';
  return branchName.startsWith(prefix)
    ? branchName.substring(prefix.length)
    : branchName;
}
// Raise a config-validation error that upstream handling reports to the user.
function throwBaseBranchValidationError(branchName) {
  const error: any = new Error('config-validation');
  error.validationError = 'baseBranch not found';
  error.validationMessage =
    'The following configured baseBranch could not be found: ' + branchName;
  throw error;
}
export class Storage {
private _config: LocalConfig = {} as any;
@ -489,38 +523,4 @@ export class Storage {
}
}
function localName(branchName: string) {
return branchName.replace(/^origin\//, '');
}
// istanbul ignore next
function checkForPlatformFailure(err: Error) {
if (process.env.NODE_ENV === 'test') {
return;
}
const platformFailureStrings = [
'remote: Invalid username or password',
'gnutls_handshake() failed',
'The requested URL returned error: 5',
'The remote end hung up unexpectedly',
'access denied or repository not exported',
'Could not write new index file',
'Failed to connect to',
'Connection timed out',
];
for (const errorStr of platformFailureStrings) {
if (err.message.includes(errorStr)) {
throw new Error('platform-failure');
}
}
}
function throwBaseBranchValidationError(branchName) {
const error = new Error('config-validation');
error.validationError = 'baseBranch not found';
error.validationMessage =
'The following configured baseBranch could not be found: ' + branchName;
throw error;
}
export default Storage;

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -227,13 +227,232 @@ export function getFileList(branchName = config.baseBranch) {
return config.storage.getFileList(branchName);
}
// Branch
// Returns true if branch exists, otherwise false
export function branchExists(branchName: string) {
  // Existence check is delegated to the storage layer.
  const { storage } = config;
  return storage.branchExists(branchName);
}
// Returns the combined status for a branch.
// Combined CI status for a branch: 'success' | 'failure' | 'pending' |
// a raw GitLab status string. Throws 'repository-changed' when the branch
// no longer exists.
export async function getBranchStatus(
  branchName: string,
  requiredStatusChecks?: string[] | null
) {
  logger.debug(`getBranchStatus(${branchName})`);
  if (!requiredStatusChecks) {
    // null means disable status checks, so it always succeeds
    return 'success';
  }
  if (Array.isArray(requiredStatusChecks) && requiredStatusChecks.length) {
    // This is Unsupported
    logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`);
    return 'failed';
  }
  if (!(await branchExists(branchName))) {
    throw new Error('repository-changed');
  }
  // First, get the branch commit SHA
  const branchSha = await config.storage.getBranchCommit(branchName);
  // Now, check the statuses for that commit
  const url = `projects/${config.repository}/repository/commits/${branchSha}/statuses`;
  const res = await api.get(url, { paginate: true });
  logger.debug(`Got res with ${res.body.length} results`);
  if (res.body.length === 0) {
    // Return 'pending' if we have no status checks
    return 'pending';
  }
  let status = 'success';
  // Return 'success' if all are success
  res.body.forEach((check: { status: string; allow_failure?: boolean }) => {
    // If one is failed then don't overwrite that
    if (status !== 'failure') {
      // Checks marked allow_failure never affect the combined status.
      if (!check.allow_failure) {
        if (check.status === 'failed') {
          // GitLab reports 'failed'; normalize to 'failure'.
          status = 'failure';
        } else if (check.status !== 'success') {
          // Otherwise propagate the non-success state (e.g. 'running').
          ({ status } = check);
        }
      }
    }
  });
  return status;
}
// Pull Request
// Create a Merge Request from `branchName` into the base (or default)
// branch, optionally enabling GitLab's merge-when-pipeline-succeeds.
export async function createPr(
  branchName: string,
  title: string,
  rawDescription: string,
  labels?: string[] | null,
  useDefaultBranch?: boolean,
  platformOptions?: PlatformPrOptions
) {
  const description = sanitize(rawDescription);
  const targetBranch = useDefaultBranch
    ? config.defaultBranch
    : config.baseBranch;
  logger.debug(`Creating Merge Request: ${title}`);
  const res = await api.post(`projects/${config.repository}/merge_requests`, {
    body: {
      source_branch: branchName,
      target_branch: targetBranch,
      remove_source_branch: true,
      title,
      description,
      labels: is.array(labels) ? labels.join(',') : null,
    },
  });
  // Harmonize the GitLab response fields with Renovate's PR shape.
  const pr = res.body;
  pr.number = pr.iid;
  pr.branchName = branchName;
  pr.displayNumber = `Merge Request #${pr.iid}`;
  pr.isModified = false;
  // istanbul ignore if
  if (config.prList) {
    config.prList.push(pr);
  }
  if (platformOptions && platformOptions.gitLabAutomerge) {
    // Best-effort: a failed automerge setup must not fail PR creation.
    try {
      await api.put(
        `projects/${config.repository}/merge_requests/${pr.iid}/merge`,
        {
          body: {
            should_remove_source_branch: true,
            merge_when_pipeline_succeeds: true,
          },
        }
      );
    } catch (err) /* istanbul ignore next */ {
      logger.debug({ err }, 'Automerge on PR creation failed');
    }
  }
  return pr;
}
// Fetch a Merge Request by iid and normalize it to Renovate's PR shape,
// including staleness, conflict state, and rebase eligibility.
export async function getPr(iid: number) {
  logger.debug(`getPr(${iid})`);
  const url = `projects/${config.repository}/merge_requests/${iid}?include_diverged_commits_count=1`;
  const pr = (await api.get(url)).body;
  // Harmonize fields with GitHub
  pr.branchName = pr.source_branch;
  pr.targetBranch = pr.target_branch;
  pr.number = pr.iid;
  pr.displayNumber = `Merge Request #${pr.iid}`;
  pr.body = pr.description;
  pr.isStale = pr.diverged_commits_count > 0;
  pr.state = pr.state === 'opened' ? 'open' : pr.state;
  // Pessimistic default; cleared below if the last committer is the bot.
  pr.isModified = true;
  if (pr.merge_status === 'cannot_be_merged') {
    logger.debug('pr cannot be merged');
    pr.canMerge = false;
    pr.isConflicted = true;
  } else if (pr.state === 'open') {
    const branchStatus = await getBranchStatus(pr.branchName, []);
    if (branchStatus === 'success') {
      pr.canMerge = true;
    }
  }
  // Check if the most recent branch commit is by us
  // If not then we don't allow it to be rebased, in case someone's changes would be lost
  const branchUrl = `projects/${
    config.repository
  }/repository/branches/${urlEscape(pr.source_branch)}`;
  try {
    const branch = (await api.get(branchUrl)).body;
    const branchCommitEmail =
      branch && branch.commit ? branch.commit.author_email : null;
    // istanbul ignore if
    if (branchCommitEmail === config.email) {
      pr.isModified = false;
    } else {
      logger.debug(
        { branchCommitEmail, configEmail: config.email, iid: pr.iid },
        'Last committer to branch does not match bot email, so PR cannot be rebased.'
      );
      pr.isModified = true;
    }
  } catch (err) {
    logger.debug({ err }, 'Error getting PR branch');
    // A missing branch on a closed MR is expected; anything else is suspect.
    if (pr.state === 'open' || err.statusCode !== 404) {
      logger.warn({ err }, 'Error getting PR branch');
      pr.isConflicted = true;
    }
  }
  return pr;
}
// Return a list of all modified files in a PR
// List the paths of all files changed in a Merge Request.
export async function getPrFiles(mrNo: number) {
  logger.debug({ mrNo }, 'getPrFiles');
  if (!mrNo) {
    return [];
  }
  const res = await api.get(
    `projects/${config.repository}/merge_requests/${mrNo}/changes`
  );
  const { changes } = res.body;
  return changes.map((f: { new_path: string }) => f.new_path);
}
// istanbul ignore next
// Closes an MR by issuing a state_event transition on the MR resource.
async function closePr(iid: number) {
  const url = `projects/${config.repository}/merge_requests/${iid}`;
  await api.put(url, {
    body: {
      state_event: 'close',
    },
  });
}
// Updates an existing MR's title and (sanitized) description.
export async function updatePr(
  iid: number,
  title: string,
  description: string
) {
  const url = `projects/${config.repository}/merge_requests/${iid}`;
  await api.put(url, {
    body: {
      title,
      description: sanitize(description),
    },
  });
}
// Attempts to merge the MR, removing the source branch on success.
// Returns true on success; logs and returns false on any failure.
export async function mergePr(iid: number) {
  try {
    await api.put(`projects/${config.repository}/merge_requests/${iid}/merge`, {
      body: {
        should_remove_source_branch: true,
      },
    });
    return true;
  } catch (err) /* istanbul ignore next */ {
    switch (err.statusCode) {
      case 401:
        logger.info('No permissions to merge PR');
        break;
      case 406:
        logger.info('PR not acceptable for merging');
        break;
      default:
        logger.debug({ err }, 'merge PR error');
        logger.info('PR merge failed');
    }
    return false;
  }
}
// Rewrites GitHub-flavoured PR body text into GitLab MR terminology and
// truncates the result to a 1,000,000-character cap.
export function getPrBody(input: string) {
  const mrText = input
    .replace(/Pull Request/g, 'Merge Request')
    .replace(/PR/g, 'MR')
    .replace(/\]\(\.\.\/pull\//g, '](../merge_requests/');
  return smartTruncate(mrText, 1000000);
}
// Branch
// Returns the Pull Request for a branch. Null if not exists.
export async function getBranchPr(branchName: string) {
logger.debug(`getBranchPr(${branchName})`);
@ -312,53 +531,6 @@ export function getRepoStatus() {
return config.storage.getRepoStatus();
}
// Returns the combined status for a branch.
export async function getBranchStatus(
  branchName: string,
  requiredStatusChecks?: string[] | null
) {
  logger.debug(`getBranchStatus(${branchName})`);
  if (!requiredStatusChecks) {
    // null means disable status checks, so it always succeeds
    return 'success';
  }
  if (Array.isArray(requiredStatusChecks) && requiredStatusChecks.length) {
    // This is Unsupported
    logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`);
    return 'failed';
  }
  if (!(await branchExists(branchName))) {
    throw new Error('repository-changed');
  }
  // First, get the branch commit SHA, then read the statuses for that commit
  const branchSha = await config.storage.getBranchCommit(branchName);
  const url = `projects/${config.repository}/repository/commits/${branchSha}/statuses`;
  const res = await api.get(url, { paginate: true });
  logger.debug(`Got res with ${res.body.length} results`);
  if (res.body.length === 0) {
    // Return 'pending' if we have no status checks
    return 'pending';
  }
  // Fold the individual check results into one combined status: a failure is
  // sticky and wins over everything else; otherwise any non-success result is
  // passed through as-is (later checks overwrite earlier ones).
  let combined = 'success';
  const checks: { status: string; allow_failure?: boolean }[] = res.body;
  for (const check of checks) {
    if (check.allow_failure) {
      continue;
    }
    if (check.status === 'failed') {
      combined = 'failure';
      break;
    }
    if (check.status !== 'success') {
      combined = check.status;
    }
  }
  return combined;
}
export async function getBranchStatusCheck(
branchName: string,
context: string
@ -692,178 +864,6 @@ export async function findPr(
);
}
// Pull Request
// Creates a merge request from branchName into the default or base branch,
// optionally applying labels and enabling GitLab's merge-when-pipeline-succeeds.
export async function createPr(
  branchName: string,
  title: string,
  rawDescription: string,
  labels?: string[] | null,
  useDefaultBranch?: boolean,
  platformOptions?: PlatformPrOptions
) {
  // Description text is passed through sanitize() before upload
  const description = sanitize(rawDescription);
  const targetBranch = useDefaultBranch
    ? config.defaultBranch
    : config.baseBranch;
  logger.debug(`Creating Merge Request: ${title}`);
  const res = await api.post(`projects/${config.repository}/merge_requests`, {
    body: {
      source_branch: branchName,
      target_branch: targetBranch,
      remove_source_branch: true,
      title,
      description,
      // GitLab expects labels as a comma-separated string
      labels: is.array(labels) ? labels.join(',') : null,
    },
  });
  const pr = res.body;
  // Harmonize response fields with the platform-neutral Pr shape
  pr.number = pr.iid;
  pr.branchName = branchName;
  pr.displayNumber = `Merge Request #${pr.iid}`;
  pr.isModified = false;
  // istanbul ignore if
  if (config.prList) {
    // Keep the cached PR list in sync so later lookups see the new MR
    config.prList.push(pr);
  }
  if (platformOptions && platformOptions.gitLabAutomerge) {
    try {
      // Ask GitLab to merge automatically once the pipeline succeeds; failure
      // here is non-fatal since the MR has already been created
      await api.put(
        `projects/${config.repository}/merge_requests/${pr.iid}/merge`,
        {
          body: {
            should_remove_source_branch: true,
            merge_when_pipeline_succeeds: true,
          },
        }
      );
    } catch (err) /* istanbul ignore next */ {
      logger.debug({ err }, 'Automerge on PR creation failed');
    }
  }
  return pr;
}
// Fetches merge request `iid` (including its diverged-commit count) and maps
// the GitLab response fields onto the platform-neutral Pr shape used elsewhere.
export async function getPr(iid: number) {
  logger.debug(`getPr(${iid})`);
  const url = `projects/${config.repository}/merge_requests/${iid}?include_diverged_commits_count=1`;
  const pr = (await api.get(url)).body;
  // Harmonize fields with GitHub
  pr.branchName = pr.source_branch;
  pr.targetBranch = pr.target_branch;
  pr.number = pr.iid;
  pr.displayNumber = `Merge Request #${pr.iid}`;
  pr.body = pr.description;
  pr.isStale = pr.diverged_commits_count > 0;
  pr.state = pr.state === 'opened' ? 'open' : pr.state;
  // Pessimistic default; refined below once the branch author is known
  pr.isModified = true;
  if (pr.merge_status === 'cannot_be_merged') {
    logger.debug('pr cannot be merged');
    pr.canMerge = false;
    pr.isConflicted = true;
  } else if (pr.state === 'open') {
    // An open, conflict-free MR is only considered mergeable when its
    // combined branch status is green
    const branchStatus = await getBranchStatus(pr.branchName, []);
    if (branchStatus === 'success') {
      pr.canMerge = true;
    }
  }
  // Check if the most recent branch commit is by us
  // If not then we don't allow it to be rebased, in case someone's changes would be lost
  const branchUrl = `projects/${
    config.repository
  }/repository/branches/${urlEscape(pr.source_branch)}`;
  try {
    const branch = (await api.get(branchUrl)).body;
    const branchCommitEmail =
      branch && branch.commit ? branch.commit.author_email : null;
    // istanbul ignore if
    if (branchCommitEmail === config.email) {
      pr.isModified = false;
    } else {
      logger.debug(
        { branchCommitEmail, configEmail: config.email, iid: pr.iid },
        'Last committer to branch does not match bot email, so PR cannot be rebased.'
      );
      pr.isModified = true;
    }
  } catch (err) {
    logger.debug({ err }, 'Error getting PR branch');
    // A 404 on a non-open MR just means the branch was deleted; anything else
    // is unexpected and marks the MR as conflicted
    if (pr.state === 'open' || err.statusCode !== 404) {
      logger.warn({ err }, 'Error getting PR branch');
      pr.isConflicted = true;
    }
  }
  return pr;
}
// Return a list of all modified files in a PR
export async function getPrFiles(mrNo: number) {
  logger.debug({ mrNo }, 'getPrFiles');
  if (!mrNo) {
    // Nothing to look up without an MR number
    return [];
  }
  const url = `projects/${config.repository}/merge_requests/${mrNo}/changes`;
  const res = await api.get(url);
  const changes: { new_path: string }[] = res.body.changes;
  return changes.map(change => change.new_path);
}
// istanbul ignore next
// Closes an MR by issuing a state_event transition on the MR resource.
async function closePr(iid: number) {
  const url = `projects/${config.repository}/merge_requests/${iid}`;
  await api.put(url, {
    body: {
      state_event: 'close',
    },
  });
}
// Updates an existing MR's title and (sanitized) description.
export async function updatePr(
  iid: number,
  title: string,
  description: string
) {
  const url = `projects/${config.repository}/merge_requests/${iid}`;
  await api.put(url, {
    body: {
      title,
      description: sanitize(description),
    },
  });
}
// Attempts to merge the MR, removing the source branch on success.
// Returns true on success; logs and returns false on any failure.
export async function mergePr(iid: number) {
  try {
    await api.put(`projects/${config.repository}/merge_requests/${iid}/merge`, {
      body: {
        should_remove_source_branch: true,
      },
    });
    return true;
  } catch (err) /* istanbul ignore next */ {
    switch (err.statusCode) {
      case 401:
        logger.info('No permissions to merge PR');
        break;
      case 406:
        logger.info('PR not acceptable for merging');
        break;
      default:
        logger.debug({ err }, 'merge PR error');
        logger.info('PR merge failed');
    }
    return false;
  }
}
// Rewrites GitHub-flavoured PR body text into GitLab MR terminology and
// truncates the result to a 1,000,000-character cap.
export function getPrBody(input: string) {
  const mrText = input
    .replace(/Pull Request/g, 'Merge Request')
    .replace(/PR/g, 'MR')
    .replace(/\]\(\.\.\/pull\//g, '](../merge_requests/');
  return smartTruncate(mrText, 1000000);
}
// Thin wrapper: delegates to the storage layer's getCommitMessages().
export function getCommitMessages() {
  return config.storage.getCommitMessages();
}

Просмотреть файл

@ -1,6 +1,8 @@
import { api as npm } from '../npm';
import { VersioningApi, RangeStrategy } from '../common';
const isVersion = (input: string) => npm.isVersion(input);
function convertToCaret(item: string) {
// In Cargo, "1.2.3" doesn't mean exactly 1.2.3, it means >= 1.2.3 < 2.0.0
if (isVersion(item)) {
@ -43,8 +45,6 @@ const isLessThanRange = (version: string, range: string) =>
export const isValid = (input: string) => npm.isValid(cargo2npm(input));
const isVersion = (input: string) => npm.isVersion(input);
const matches = (version: string, range: string) =>
npm.matches(version, cargo2npm(range));

Просмотреть файл

@ -18,9 +18,7 @@ for (const scheme of supportedSchemes) {
schemes[scheme] = require('./' + scheme).api; // eslint-disable-line
}
export { get };
function get(versionScheme: string): VersioningApi {
export function get(versionScheme: string): VersioningApi {
if (!versionScheme) {
logger.debug('Missing versionScheme');
return schemes.semver as VersioningApi;

Просмотреть файл

@ -12,25 +12,6 @@ export interface VersionComparator {
(version: string, other: string): number;
}
// helper functions to ease create other versioning schemas with little code
// especially if those schemas do not support ranges
export const create = ({
  parse,
  compare,
}: {
  parse: VersionParser;
  compare: VersionComparator;
}) => {
  // Assemble the schema from the parser- and comparer-derived pieces
  const schema: VersioningApi = {} as any;
  if (parse) {
    Object.assign(schema, parser(parse));
  }
  if (compare) {
    Object.assign(schema, comparer(compare));
  }
  return schema;
};
// since this file was meant for no range support, a range = version
// parse should return null if version not valid
// parse should return an object with property release, an array of version sections major.minor.patch
@ -119,6 +100,25 @@ export const comparer = (
};
};
// helper functions to ease create other versioning schemas with little code
// especially if those schemas do not support ranges
export const create = ({
  parse,
  compare,
}: {
  parse: VersionParser;
  compare: VersionComparator;
}) => {
  // Assemble the schema from the parser- and comparer-derived pieces
  const schema: VersioningApi = {} as any;
  if (parse) {
    Object.assign(schema, parser(parse));
  }
  if (compare) {
    Object.assign(schema, comparer(compare));
  }
  return schema;
};
export abstract class GenericVersioningApi<
T extends GenericVersion = GenericVersion
> implements VersioningApi {

Просмотреть файл

@ -267,25 +267,9 @@ function isVersion(version: string) {
return !!tokens.length;
}
// A string is valid when it is either an exact version or a parseable range.
function isValid(str: string) {
  return Boolean(str) && (isVersion(str) || Boolean(parseRange(str)));
}
// Sentinel values marking whether a range endpoint includes its boundary value.
const INCLUDING_POINT = 'INCLUDING_POINT';
const EXCLUDING_POINT = 'EXCLUDING_POINT';
// One parsed range interval: *Type records inclusive/exclusive semantics for
// each endpoint, *Value the boundary version, and *Bracket preserves the
// original bracket character so the range can be rendered back to a string.
export interface Range {
  leftType: typeof INCLUDING_POINT | typeof EXCLUDING_POINT;
  leftValue: string;
  leftBracket: string;
  rightType: typeof INCLUDING_POINT | typeof EXCLUDING_POINT;
  rightValue: string;
  rightBracket: string;
}
function parseRange(rangeStr: string) {
function emptyInterval(): Range {
return {
@ -383,6 +367,22 @@ function parseRange(rangeStr: string) {
);
}
// A string is valid when it is either an exact version or a parseable range.
function isValid(str: string) {
  return Boolean(str) && (isVersion(str) || Boolean(parseRange(str)));
}
// One parsed range interval: *Type records inclusive/exclusive semantics for
// each endpoint, *Value the boundary version, and *Bracket preserves the
// original bracket character so the range can be rendered back to a string.
export interface Range {
  leftType: typeof INCLUDING_POINT | typeof EXCLUDING_POINT;
  leftValue: string;
  leftBracket: string;
  rightType: typeof INCLUDING_POINT | typeof EXCLUDING_POINT;
  rightValue: string;
  rightBracket: string;
}
function rangeToStr(fullRange: Range[]): string | null {
if (fullRange === null) return null;

Просмотреть файл

@ -11,9 +11,7 @@ import { parseRange } from 'semver-utils';
import { logger } from '../../logger';
import { RangeStrategy } from '../common';
export { getNewValue };
function getNewValue(
export function getNewValue(
currentValue: string,
rangeStrategy: RangeStrategy,
fromVersion: string,

Просмотреть файл

@ -4,8 +4,6 @@ import { parse as parseRange } from '@renovate/pep440/lib/specifier';
import { logger } from '../../logger';
import { RangeStrategy } from '../common';
export { getNewValue };
function getFutureVersion(
baseVersion: string,
toVersion: string,
@ -37,7 +35,7 @@ interface Range {
version: string;
}
function getNewValue(
export function getNewValue(
currentValue: string,
rangeStrategy: RangeStrategy,
fromVersion: string,

Просмотреть файл

@ -62,6 +62,25 @@ const isSingleVersion = (constraint: string) =>
)) ||
isVersion(constraint.trim());
// Rebuilds a shortened (1- or 2-part) constraint like "^4" or "~4.1" against
// toVersion, keeping the same number of parts. Returns null when the current
// value has three or more parts.
function handleShort(
  operator: string,
  currentValue: string,
  toVersion: string
) {
  const majorPart = major(toVersion);
  const minorPart = minor(toVersion);
  const sections = currentValue.split('.').length;
  if (sections === 1) {
    // [^,~]4
    return `${operator}${majorPart}`;
  }
  if (sections === 2) {
    // [^,~]4.1
    return `${operator}${majorPart}.${minorPart}`;
  }
  return null;
}
function getNewValue(
currentValue: string,
rangeStrategy: RangeStrategy,
@ -97,25 +116,6 @@ function getNewValue(
return newPoetry;
}
// Rebuilds a shortened (1- or 2-part) constraint like "^4" or "~4.1" against
// toVersion, keeping the same number of parts. Returns null when the current
// value has three or more parts.
function handleShort(
  operator: string,
  currentValue: string,
  toVersion: string
) {
  const majorPart = major(toVersion);
  const minorPart = minor(toVersion);
  const sections = currentValue.split('.').length;
  if (sections === 1) {
    // [^,~]4
    return `${operator}${majorPart}`;
  }
  if (sections === 2) {
    // [^,~]4.1
    return `${operator}${majorPart}.${minorPart}`;
  }
  return null;
}
export const api: VersioningApi = {
...npm,
getNewValue,

Просмотреть файл

@ -13,6 +13,15 @@ export interface RegExpVersion extends GenericVersion {
compatibility: string;
}
// convenience method for passing a Version object into any semver.* method.
function asSemver(version: RegExpVersion): string {
  const [majorNum, minorNum, patchNum] = version.release;
  const core = `${majorNum}.${minorNum}.${patchNum}`;
  return typeof version.prerelease === 'undefined'
    ? core
    : `${core}-${version.prerelease}`;
}
export class RegExpVersioningApi extends GenericVersioningApi<RegExpVersion> {
// config is expected to be overridden by a user-specified RegExp value
// sample values:
@ -112,15 +121,6 @@ export class RegExpVersioningApi extends GenericVersioningApi<RegExpVersion> {
}
}
// convenience method for passing a Version object into any semver.* method.
function asSemver(version: RegExpVersion): string {
  const [majorNum, minorNum, patchNum] = version.release;
  const core = `${majorNum}.${minorNum}.${patchNum}`;
  return typeof version.prerelease === 'undefined'
    ? core
    : `${core}-${version.prerelease}`;
}
export const api: VersioningApiConstructor = RegExpVersioningApi;
export default api;

Просмотреть файл

@ -42,6 +42,17 @@ export type ProcessBranchResult =
| 'pr-edited'
| 'pr-hourly-limit-reached';
// TODO: proper typings
// A rebase is requested via any of: a "rebase!" title prefix, the configured
// rebase label, or a ticked rebase checkbox in the PR body.
function rebaseCheck(config: RenovateConfig, branchPr: any): boolean {
  const byTitle = branchPr.title && branchPr.title.startsWith('rebase!');
  const byLabel =
    branchPr.labels && branchPr.labels.includes(config.rebaseLabel);
  const byCheckbox =
    branchPr.body && branchPr.body.includes(`- [x] <!-- ${appSlug}-rebase -->`);
  return byTitle || byLabel || byCheckbox;
}
export async function processBranch(
branchConfig: BranchConfig,
prHourlyLimitReached?: boolean,
@ -555,14 +566,3 @@ export async function processBranch(
}
return 'done';
}
// TODO: proper typings
// A rebase is requested via any of: a "rebase!" title prefix, the configured
// rebase label, or a ticked rebase checkbox in the PR body.
function rebaseCheck(config: RenovateConfig, branchPr: any): boolean {
  const byTitle = branchPr.title && branchPr.title.startsWith('rebase!');
  const byLabel =
    branchPr.labels && branchPr.labels.includes(config.rebaseLabel);
  const byCheckbox =
    branchPr.body && branchPr.body.includes(`- [x] <!-- ${appSlug}-rebase -->`);
  return byTitle || byLabel || byCheckbox;
}

Просмотреть файл

@ -4,26 +4,26 @@ const limitsToInit = ['prCommitsPerRunLimit'];
const l: Record<string, number> = {};
const v: Record<string, number> = {};
// Records the configured maximum for the named limit in the module-level map.
export function setLimit(name: string, value: number) {
  logger.debug(`Limits.setLimit l[${name}] = ${value}`);
  l[name] = value;
}
export function init(config: Record<string, any>) {
logger.info(`Limits.init enter method`);
logger.debug(`Limits.init enter method`);
for (const limit of limitsToInit) {
logger.info(`Limits.init ${limit} processing`);
logger.debug(`Limits.init ${limit} processing`);
if (config[limit]) {
setLimit(limit, config[limit]);
v[limit] = 0;
} else {
logger.info(
logger.debug(
`Limits.init ${limit} variable is not set. Ignoring ${limit}`
);
}
}
}
// Records the configured maximum for the named limit in the module-level map.
export function setLimit(name: string, value: number) {
  logger.debug(`Limits.setLimit l[${name}] = ${value}`);
  l[name] = value;
}
export function getLimitRemaining(name: string) {
let result;
if (typeof v[name] !== 'undefined') {

Просмотреть файл

@ -50,6 +50,84 @@ export interface LookupUpdateConfig
separateMultipleMajor?: boolean;
}
function getType(
config: LookupUpdateConfig,
fromVersion: string,
toVersion: string
): string {
const { versionScheme, rangeStrategy, currentValue } = config;
const version = versioning.get(versionScheme);
if (rangeStrategy === 'bump' && version.matches(toVersion, currentValue)) {
return 'bump';
}
if (version.getMajor(toVersion) > version.getMajor(fromVersion)) {
return 'major';
}
if (version.getMinor(toVersion) > version.getMinor(fromVersion)) {
return 'minor';
}
if (config.separateMinorPatch) {
return 'patch';
}
if (config.patch.automerge && !config.minor.automerge) {
return 'patch';
}
return 'minor';
}
// Determines which existing version an upgrade is considered to be "from".
// Exact/pinned values are returned directly; for ranges the answer depends on
// rangeStrategy and the candidate versions that satisfy the current range.
function getFromVersion(
  config: LookupUpdateConfig,
  rangeStrategy: string,
  latestVersion: string,
  allVersions: string[]
): string | null {
  const { currentValue, lockedVersion, versionScheme } = config;
  const version = versioning.get(versionScheme);
  if (version.isVersion(currentValue)) {
    // Already an exact version
    return currentValue;
  }
  if (version.isSingleVersion(currentValue)) {
    // e.g. "=1.2.3" — strip equals signs to get the bare version
    return currentValue.replace(/=/g, '').trim();
  }
  logger.trace(`currentValue ${currentValue} is range`);
  let useVersions = allVersions.filter(v => version.matches(v, currentValue));
  if (latestVersion && version.matches(latestVersion, currentValue)) {
    // Ignore candidates above the registry's "latest" release
    useVersions = useVersions.filter(
      v => !version.isGreaterThan(v, latestVersion)
    );
  }
  if (rangeStrategy === 'pin') {
    // Prefer the locked version when pinning, if one exists
    return (
      lockedVersion || version.maxSatisfyingVersion(useVersions, currentValue)
    );
  }
  if (rangeStrategy === 'bump') {
    // Use the lowest version in the current range
    return version.minSatisfyingVersion(useVersions, currentValue);
  }
  // Use the highest version in the current range
  return version.maxSatisfyingVersion(useVersions, currentValue);
}
// Chooses the grouping bucket for an update: lockfile updates keep their own
// bucket, everything collapses to 'latest' when major/minor are not separated
// (or majors automerge), and majors may be split per major version.
function getBucket(config: LookupUpdateConfig, update: LookupUpdate) {
  const { separateMajorMinor, separateMultipleMajor } = config;
  const { updateType, newMajor } = update;
  if (updateType === 'lockfileUpdate') {
    return updateType;
  }
  const majorAutomerged =
    config.major.automerge === true ||
    (config.automerge && config.major.automerge !== false);
  if (!separateMajorMinor || majorAutomerged) {
    return 'latest';
  }
  return separateMultipleMajor && updateType === 'major'
    ? `major-${newMajor}`
    : updateType;
}
export async function lookupUpdates(
config: LookupUpdateConfig
): Promise<UpdateResult> {
@ -339,81 +417,3 @@ export async function lookupUpdates(
}
return res;
}
function getType(
config: LookupUpdateConfig,
fromVersion: string,
toVersion: string
): string {
const { versionScheme, rangeStrategy, currentValue } = config;
const version = versioning.get(versionScheme);
if (rangeStrategy === 'bump' && version.matches(toVersion, currentValue)) {
return 'bump';
}
if (version.getMajor(toVersion) > version.getMajor(fromVersion)) {
return 'major';
}
if (version.getMinor(toVersion) > version.getMinor(fromVersion)) {
return 'minor';
}
if (config.separateMinorPatch) {
return 'patch';
}
if (config.patch.automerge && !config.minor.automerge) {
return 'patch';
}
return 'minor';
}
// Chooses the grouping bucket for an update: lockfile updates keep their own
// bucket, everything collapses to 'latest' when major/minor are not separated
// (or majors automerge), and majors may be split per major version.
function getBucket(config: LookupUpdateConfig, update: LookupUpdate) {
  const { separateMajorMinor, separateMultipleMajor } = config;
  const { updateType, newMajor } = update;
  if (updateType === 'lockfileUpdate') {
    return updateType;
  }
  const majorAutomerged =
    config.major.automerge === true ||
    (config.automerge && config.major.automerge !== false);
  if (!separateMajorMinor || majorAutomerged) {
    return 'latest';
  }
  return separateMultipleMajor && updateType === 'major'
    ? `major-${newMajor}`
    : updateType;
}
// Determines which existing version an upgrade is considered to be "from".
// Exact/pinned values are returned directly; for ranges the answer depends on
// rangeStrategy and the candidate versions that satisfy the current range.
function getFromVersion(
  config: LookupUpdateConfig,
  rangeStrategy: string,
  latestVersion: string,
  allVersions: string[]
): string | null {
  const { currentValue, lockedVersion, versionScheme } = config;
  const version = versioning.get(versionScheme);
  if (version.isVersion(currentValue)) {
    // Already an exact version
    return currentValue;
  }
  if (version.isSingleVersion(currentValue)) {
    // e.g. "=1.2.3" — strip equals signs to get the bare version
    return currentValue.replace(/=/g, '').trim();
  }
  logger.trace(`currentValue ${currentValue} is range`);
  let useVersions = allVersions.filter(v => version.matches(v, currentValue));
  if (latestVersion && version.matches(latestVersion, currentValue)) {
    // Ignore candidates above the registry's "latest" release
    useVersions = useVersions.filter(
      v => !version.isGreaterThan(v, latestVersion)
    );
  }
  if (rangeStrategy === 'pin') {
    // Prefer the locked version when pinning, if one exists
    return (
      lockedVersion || version.maxSatisfyingVersion(useVersions, currentValue)
    );
  }
  if (rangeStrategy === 'bump') {
    // Use the lowest version in the current range
    return version.minSatisfyingVersion(useVersions, currentValue);
  }
  // Use the highest version in the current range
  return version.maxSatisfyingVersion(useVersions, currentValue);
}

Просмотреть файл

@ -23,6 +23,42 @@ function ifTypesGroup(
);
}
// Extracts the [datasource, name, from, to] tuple for the commit-body table.
// Returns null when the table is disabled or any component cannot be resolved.
function getTableValues(
  upgrade: PackageDependency & ManagerConfig
): [string, string, string, string] | null {
  if (!upgrade.commitBodyTable) {
    return null;
  }
  const {
    datasource,
    lookupName,
    depName,
    fromVersion,
    toVersion,
    displayFrom,
    displayTo,
  } = upgrade;
  // Fall back from the precise field to its display equivalent
  const name = lookupName || depName;
  const from = fromVersion || displayFrom;
  const to = toVersion || displayTo;
  if (!(datasource && name && from && to)) {
    logger.debug(
      {
        datasource,
        lookupName,
        depName,
        fromVersion,
        toVersion,
        displayFrom,
        displayTo,
      },
      'Cannot determine table values'
    );
    return null;
  }
  return [datasource, name, from, to];
}
export function generateBranchConfig(branchUpgrades) {
logger.debug(`generateBranchConfig(${branchUpgrades.length})`);
logger.trace({ config: branchUpgrades });
@ -290,39 +326,3 @@ export function generateBranchConfig(branchUpgrades) {
}
return config;
}
// Extracts the [datasource, name, from, to] tuple for the commit-body table.
// Returns null when the table is disabled or any component cannot be resolved.
function getTableValues(
  upgrade: PackageDependency & ManagerConfig
): [string, string, string, string] | null {
  if (!upgrade.commitBodyTable) {
    return null;
  }
  const {
    datasource,
    lookupName,
    depName,
    fromVersion,
    toVersion,
    displayFrom,
    displayTo,
  } = upgrade;
  // Fall back from the precise field to its display equivalent
  const name = lookupName || depName;
  const from = fromVersion || displayFrom;
  const to = toVersion || displayTo;
  if (!(datasource && name && from && to)) {
    logger.debug(
      {
        datasource,
        lookupName,
        depName,
        fromVersion,
        toVersion,
        displayFrom,
        displayTo,
      },
      'Cannot determine table values'
    );
    return null;
  }
  return [datasource, name, from, to];
}

Просмотреть файл

@ -85,6 +85,10 @@ describe('datasource/maven', () => {
nock.enableNetConnect();
});
// Wraps each plain version string in the { version } release shape.
function generateReleases(versions) {
  const releases = [];
  for (const version of versions) {
    releases.push({ version });
  }
  return releases;
}
describe('getPkgReleases', () => {
it('should return empty if library is not found', async () => {
const releases = await datasource.getPkgReleases({
@ -277,7 +281,3 @@ describe('datasource/maven', () => {
});
});
});
// Wraps each plain version string in the { version } release shape.
function generateReleases(versions) {
  const releases = [];
  for (const version of versions) {
    releases.push({ version });
  }
  return releases;
}

Просмотреть файл

@ -113,18 +113,6 @@ describe('platform/azure', () => {
});
});
// Smoke tests: these only verify the platform methods exist and run without
// throwing (getRepoStatus resolves undefined for azure; cleanRepo is a no-op).
describe('getRepoStatus()', () => {
  it('exists', async () => {
    await initRepo();
    expect(await azure.getRepoStatus()).toBeUndefined();
  });
});
describe('cleanRepo()', () => {
  it('exists', () => {
    azure.cleanRepo();
  });
});
function initRepo(args?: Partial<RepoParams> | string) {
azureApi.gitApi.mockImplementationOnce(
() =>
@ -172,6 +160,19 @@ describe('platform/azure', () => {
} as any);
}
// Smoke tests: these only verify the platform methods exist and run without
// throwing (getRepoStatus resolves undefined for azure; cleanRepo is a no-op).
describe('getRepoStatus()', () => {
  it('exists', async () => {
    await initRepo();
    expect(await azure.getRepoStatus()).toBeUndefined();
  });
});
describe('cleanRepo()', () => {
  it('exists', () => {
    azure.cleanRepo();
  });
});
describe('initRepo', () => {
it(`should initialise the config for a repo`, async () => {
const config = await initRepo({

Просмотреть файл

@ -125,17 +125,6 @@ describe('platform/gitlab', () => {
expect(repos).toMatchSnapshot();
});
});
// Smoke tests: these only verify the platform methods exist and run without
// throwing; no assertions are made on the returned values.
describe('getRepoStatus()', () => {
  it('exists', async () => {
    await initRepo();
    await gitlab.getRepoStatus();
  });
});
describe('cleanRepo()', () => {
  it('exists', () => {
    gitlab.cleanRepo();
  });
});
function initRepo(args?: any) {
// projects/${config.repository}
api.get.mockImplementationOnce(
@ -166,6 +155,17 @@ describe('platform/gitlab', () => {
optimizeForDisabled: false,
});
}
// Smoke tests: these only verify the platform methods exist and run without
// throwing; no assertions are made on the returned values.
describe('getRepoStatus()', () => {
  it('exists', async () => {
    await initRepo();
    await gitlab.getRepoStatus();
  });
});
describe('cleanRepo()', () => {
  it('exists', () => {
    gitlab.cleanRepo();
  });
});
describe('initRepo', () => {
it(`should throw error if disabled in renovate.json`, async () => {