
Feature/plat 1902 (DO NOT MERGE) #301

Open
wants to merge 8 commits into base: develop
22 changes: 22 additions & 0 deletions .eslintrc
@@ -0,0 +1,22 @@
{
"env": {
"es2022": true
},
"plugins": [
"@typescript-eslint",
"prettier"
],
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/recommended-requiring-type-checking",
"prettier"
],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 2022
},
"rules": {
"prefer-const": "error",
}
}
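For context, a minimal sketch (not part of this PR) of what the one explicit rule above, prefer-const, reports; the file name and snippet below are illustrative only:

// example.js — illustrative only
let greeting = 'hello'   // prefer-const: 'greeting' is never reassigned, use 'const' instead
console.log(greeting)

const fixed = 'hello'    // passes prefer-const
console.log(fixed)
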
3 changes: 2 additions & 1 deletion .gitignore
@@ -2,4 +2,5 @@ node_modules/
.env
coverage/
.nyc_output/
docker/api.env
docker/api.env
.npmrc
4 changes: 4 additions & 0 deletions .husky/commit-msg
@@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

npx --no -- commitlint --edit "$1"
4 changes: 4 additions & 0 deletions .husky/pre-commit
@@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

npx lint-staged
1 change: 0 additions & 1 deletion Procfile

This file was deleted.

21 changes: 12 additions & 9 deletions app.js
@@ -13,7 +13,7 @@ const _ = require('lodash')
const winston = require('winston')
const helper = require('./src/common/helper')
const errorMiddleware = require('./src/common/ErrorMiddleware')
const routes = require('./src/routes')
const routes = require('./src/routes');
const swaggerUi = require('swagger-ui-express')
const YAML = require('yamljs')
const authenticator = require('tc-core-library-js').middleware.jwtAuthenticator
@@ -37,7 +37,7 @@ const apiRouter = express.Router()
* @param {Array} source the array in which to search for the term
* @param {Array | String} term the term to search
*/
function checkIfExists (source, term) {
function checkIfExists(source, term) {
let terms

if (!_.isArray(source)) {
@@ -67,13 +67,17 @@ function checkIfExists (source, term) {
_.each(routes, (verbs, url) => {
_.each(verbs, (def, verb) => {
let actions = [
(req, res, next) => {
async (req, res, next) => {
req.signature = `${def.controller}#${def.method}`
req.authUser = req.authUser || {
[Review comment from a Contributor]: Need to clean up these hardcoded values

userId: 'a84a4180-65aa-42ec-a945-5fd21dec1567',
roles: ['Administrator'],
scopes: ['read:submission', 'all:submission']
}
next()
}
]
const method = require(`./src/controllers/${def.controller}`)[ def.method ]; // eslint-disable-line

const method = require(`./src/controllers/${def.controller}`)[def.method]; // eslint-disable-line
if (!method) {
throw new Error(`${def.method} is undefined, for controller ${def.controller}`)
}
@@ -83,9 +87,9 @@ _.each(routes, (verbs, url) => {

// add Authenticator check if route has auth
if (def.auth) {
actions.push((req, res, next) => {
authenticator(_.pick(config, ['AUTH_SECRET', 'VALID_ISSUERS']))(req, res, next)
})
// actions.push((req, res, next) => {
// authenticator(_.pick(config, ['AUTH_SECRET', 'VALID_ISSUERS']))(req, res, next)
// })

actions.push((req, res, next) => {
if (!req.authUser) {
@@ -125,7 +129,6 @@ _.each(routes, (verbs, url) => {
}
})
}

actions.push(method)
winston.info(`API : ${verb.toLocaleUpperCase()} ${config.API_VERSION}${url}`)
apiRouter[verb](`${config.API_VERSION}${url}`, helper.autoWrapExpress(actions))
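As a rough sketch of how the actions array above ends up on the router (illustrative names, not part of this PR; helper.autoWrapExpress is assumed to behave roughly like the local wrap below, forwarding async rejections to the error middleware):

const express = require('express')
const app = express()

// Assumed behaviour of helper.autoWrapExpress: catch rejected promises and pass them to next()
const wrap = (fn) => (req, res, next) => Promise.resolve(fn(req, res, next)).catch(next)

const actions = [
  async (req, res, next) => { req.signature = 'DemoController#demoMethod'; next() },
  (req, res) => res.json({ signature: req.signature })
]

app.get('/api/v5/demo', actions.map(wrap))
app.listen(3000)
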
20 changes: 20 additions & 0 deletions commitlint.config.js
@@ -0,0 +1,20 @@
module.exports = {
extends: ["@commitlint/config-conventional"],
rules: {
"type-enum": [
2,
"always",
[
"feat",
"fix",
"docs",
"chore",
"refactor",
"ci",
"test",
"perf",
"revert",
],
],
},
};
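A hedged sketch (not part of this PR) of how the rules above behave, assuming @commitlint/lint's default export; only the "type-enum" rule from this file is passed in:

const lint = require('@commitlint/lint').default
const { rules } = require('./commitlint.config')

lint('feat: add gRPC submission configuration', rules)
  .then((report) => console.log(report.valid))   // expected: true
lint('style: reformat config/default.js', rules)
  .then((report) => console.log(report.valid))   // expected: false — "style" is not in type-enum
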
75 changes: 43 additions & 32 deletions config/default.js
@@ -1,56 +1,67 @@
/**
* Default configuration file
*/
require('dotenv').config()
require("dotenv").config();
module.exports = {
DISABLE_LOGGING: process.env.DISABLE_LOGGING || false, // If true, logging will be disabled
LOG_LEVEL: process.env.LOG_LEVEL || 'debug',
LOG_LEVEL: process.env.LOG_LEVEL || "debug",
WEB_SERVER_PORT: process.env.PORT || 3000,
AUTH_SECRET: process.env.AUTH_SECRET || 'mysecret',
VALID_ISSUERS: process.env.VALID_ISSUERS ? process.env.VALID_ISSUERS.replace(/\\"/g, '') : '["https://api.topcoder.com","https://topcoder-dev.auth0.com/"]',
HOST: process.env.HOST || 'localhost:3000',
API_VERSION: process.env.API_VERSION || '/api/v5',
DEFAULT_MESSAGE: 'Internal Server Error',
AUTH_SECRET: process.env.AUTH_SECRET || "mysecret",
VALID_ISSUERS: process.env.VALID_ISSUERS
? process.env.VALID_ISSUERS.replace(/\\"/g, "")
: '["https://api.topcoder.com","https://topcoder-dev.auth0.com/"]',
HOST: process.env.HOST || "localhost:3000",
API_VERSION: process.env.API_VERSION || "/api/v5",
DEFAULT_MESSAGE: "Internal Server Error",
aws: {
AWS_REGION: process.env.AWS_REGION || 'us-east-1', // AWS Region to be used by the application
AWS_REGION: process.env.AWS_REGION || "us-east-1", // AWS Region to be used by the application
AWS_READ_UNITS: process.env.AWS_READ_UNITS || 5,
AWS_WRITE_UNITS: process.env.AWS_WRITE_UNITS || 5,
S3_BUCKET: process.env.S3_BUCKET || 'tc-testing-submissions', // S3 Bucket to which submissions need to be uploaded
ARTIFACT_BUCKET: process.env.ARTIFACT_BUCKET || 'tc-testing-submissions' // S3 bucket to which artifacts need to be uploaded
S3_BUCKET: process.env.S3_BUCKET || "tc-testing-submissions", // S3 Bucket to which submissions need to be uploaded
ARTIFACT_BUCKET: process.env.ARTIFACT_BUCKET || "tc-testing-submissions", // S3 bucket to which artifacts need to be uploaded
},
BUSAPI_URL: process.env.BUSAPI_URL || 'https://api.topcoder-dev.com/v5',
KAFKA_ERROR_TOPIC: process.env.KAFKA_ERROR_TOPIC || 'error.notification',
KAFKA_AGGREGATE_TOPIC: process.env.KAFKA_AGGREGATE_TOPIC || 'submission.notification.aggregate',
CHALLENGEAPI_V5_URL: process.env.CHALLENGEAPI_V5_URL || 'https://api.topcoder-dev.com/v5/challenges',
RESOURCEAPI_V5_BASE_URL: process.env.RESOURCEAPI_V5_BASE_URL || 'https://api.topcoder-dev.com/v5',
BUSAPI_URL: process.env.BUSAPI_URL || "https://api.topcoder-dev.com/v5",
KAFKA_ERROR_TOPIC: process.env.KAFKA_ERROR_TOPIC || "error.notification",
KAFKA_AGGREGATE_TOPIC:
process.env.KAFKA_AGGREGATE_TOPIC || "submission.notification.aggregate",
CHALLENGEAPI_V5_URL:
process.env.CHALLENGEAPI_V5_URL ||
"https://api.topcoder-dev.com/v5/challenges",
RESOURCEAPI_V5_BASE_URL:
process.env.RESOURCEAPI_V5_BASE_URL || "https://api.topcoder-dev.com/v5",
AUTH0_URL: process.env.AUTH0_URL, // Auth0 credentials for Submission Service
AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE || 'https://www.topcoder.com',
AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE || "https://www.topcoder.com",
TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME,
AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID,
AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET,
esConfig: {
HOST: process.env.ES_HOST || 'localhost:9200',
API_VERSION: process.env.ES_API_VERSION || '6.3',
ES_INDEX: process.env.ES_INDEX || 'submission',
ES_TYPE: process.env.ES_TYPE || '_doc' // ES 6.x accepts only 1 Type per index and it's mandatory to define it
HOST: process.env.ES_HOST || "https://localhost:9200",
API_VERSION: process.env.ES_API_VERSION || "6.3",
ES_INDEX: process.env.ES_INDEX || "submission",
ES_TYPE: process.env.ES_TYPE || "_doc", // ES 6.x accepts only 1 Type per index and it's mandatory to define it
},
PAGE_SIZE: process.env.PAGE_SIZE || 20,
MAX_PAGE_SIZE: parseInt(process.env.MAX_PAGE_SIZE) || 100,
ES_BATCH_SIZE: process.env.ES_BATCH_SIZE || 1000,
UPDATE_V5_CHALLENGE_BATCH_SIZE: process.env.UPDATE_V5_CHALLENGE_BATCH_SIZE || 100,
SUBMISSION_TABLE_NAME: process.env.SUBMISSION_TABLE_NAME || 'Submission',
UPDATE_V5_CHALLENGE_BATCH_SIZE:
process.env.UPDATE_V5_CHALLENGE_BATCH_SIZE || 100,
SUBMISSION_TABLE_NAME: process.env.SUBMISSION_TABLE_NAME || "Submission",
AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL,
FETCH_CREATED_DATE_START: process.env.FETCH_CREATED_DATE_START || '2021-01-01',
FETCH_CREATED_DATE_START:
process.env.FETCH_CREATED_DATE_START || "2021-01-01",
FETCH_PAGE_SIZE: process.env.FETCH_PAGE_SIZE || 500,
MIGRATE_CHALLENGES: process.env.MIGRATE_CHALLENGES || [],

V5TOLEGACYSCORECARDMAPPING: {
'c56a4180-65aa-42ec-a945-5fd21dec0501': 30001363,
'c56a4180-65aa-42ec-a945-5fd21dec0502': 123456789,
'c56a4180-65aa-42ec-a945-5fd21dec0503': 30001031,
'c56a4180-65aa-42ec-a945-5fd21dec0504': 987654321,
'c56a4180-65aa-42ec-a945-5fd21dec0505': 987123456,
'9ecc88e5-a4ee-44a4-8ec1-70bd98022510': 123789456,
'd6d31f34-8ee5-4589-ae65-45652fcc01a6': 30000720
}
}
"c56a4180-65aa-42ec-a945-5fd21dec0501": 30001363,
"c56a4180-65aa-42ec-a945-5fd21dec0502": 123456789,
"c56a4180-65aa-42ec-a945-5fd21dec0503": 30001031,
"c56a4180-65aa-42ec-a945-5fd21dec0504": 987654321,
"c56a4180-65aa-42ec-a945-5fd21dec0505": 987123456,
"9ecc88e5-a4ee-44a4-8ec1-70bd98022510": 123789456,
"d6d31f34-8ee5-4589-ae65-45652fcc01a6": 30000720,
},
GRPC_SUBMISSION_SERVER_HOST:
process.env.GRPC_SUBMISSION_SERVER_HOST || "localhost",
GRPC_SUBMISSION_SERVER_PORT: process.env.GRPC_SUBMISSION_SERVER_PORT || 9092,
};
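For illustration only (not part of this PR): the new gRPC settings would typically be read through node-config like any other key in this file:

const config = require('config')

// Falls back to localhost:9092 when the corresponding env vars are not set
const grpcTarget = `${config.GRPC_SUBMISSION_SERVER_HOST}:${config.GRPC_SUBMISSION_SERVER_PORT}`
console.log(`Submission domain gRPC target: ${grpcTarget}`)
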
30 changes: 23 additions & 7 deletions package.json
@@ -2,7 +2,7 @@
"name": "submissions-api",
"version": "1.0.0",
"description": "Topcoder Submissions API",
"main": "app.js",
"main": "app.ts",
"scripts": {
"start": "node app.js",
"dev": "nodemon app.js",
@@ -23,12 +23,15 @@
"services:up": "docker-compose -f ./local/docker-compose.yml up -d",
"services:down": "docker-compose -f ./local/docker-compose.yml down",
"services:logs": "docker-compose -f ./local/docker-compose.yml logs",
"local:init": "npm run init-db && npm run init-es"
"local:init": "npm run init-db && npm run init-es",
"prepare": "husky install"
},
"dependencies": {
"@elastic/elasticsearch": "^8.6.0",
"@topcoder-framework/domain-submission": "0.5.4-ci.0",
"@topcoder-framework/lib-common": "^v0.4.18-ci.0",
"amazon-s3-uri": "0.0.3",
"aws-sdk": "^2.265.1",
"bluebird": "^3.5.1",
"body-parser": "^1.18.3",
"co": "^4.6.0",
"common-errors": "^1.0.4",
@@ -53,20 +56,33 @@
"yamljs": "^0.3.0"
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^5.48.1",
"@typescript-eslint/parser": "^5.48.1",
"aws-sdk-mock": "^4.0.0",
"chai": "^4.1.2",
"chai-http": "^4.0.0",
"co-mocha": "^1.2.2",
"husky": "^3.0.5",
"commitlint": "^17.4.2",
"eslint": "^8.31.0",
"eslint-config-prettier": "^8.6.0",
"eslint-plugin-prettier": "^4.2.1",
"husky": "^8.0.3",
"mocha": "^5.2.0",
"mocha-prepare": "^0.1.0",
"nock": "^9.4.3",
"nodemon": "^1.17.5",
"nyc": "^12.0.2",
"standard": "^11.0.1"
"prettier": "^2.8.2",
"standard": "^11.0.1",
"typescript": "^4.9.4"
},
"engines": {
"node": "12.22.12"
"lint-staged": {
"*.{ts,js}": [
"npx prettier --write"
],
"*.{json,md,yml}": [
"npx prettier --write"
]
},
"standard": {
"env": [
27 changes: 17 additions & 10 deletions scripts/migrateFromDBToES.js
@@ -17,15 +17,15 @@ const esClient = helper.getEsClient()
* @param customFunction {Function} custom function to handle record
* @returns {Promise}
*/
function * migrateRecords (tableName, customFunction) {
async function migrateRecords(tableName, customFunction) {
let body = []
let batchCounter = 1
const params = {
TableName: tableName
}
// Process until all the records from DB is fetched
while (true) {
const records = yield dbhelper.scanRecords(params)
const records = await dbhelper.scanRecords(params)
logger.debug(`Number of ${tableName}s currently fetched from DB - ` + records.Items.length)
let i = 0
for (const recordItem of records.Items) {
@@ -39,13 +39,20 @@ function * migrateRecords (tableName, customFunction) {
// data
body.push(_.extend({ resource: helper.camelize(tableName) }, item))


if (i % config.ES_BATCH_SIZE === 0) {
logger.debug(`${tableName} - Processing batch # ` + batchCounter)
yield esClient.bulk({
index: config.get('esConfig.ES_INDEX'),
type: config.get('esConfig.ES_TYPE'),
body
})
try {

await esClient.bulk({
index: config.get('esConfig.ES_INDEX'),
// type: config.get('esConfig.ES_TYPE'),
body
})
} catch (err) {
console.log("************** Error **************");
console.log(err);
}
body = []
batchCounter++
}
@@ -58,9 +65,9 @@
} else {
if (body.length > 0) {
logger.debug(`${tableName} - Final batch processing...`)
yield esClient.bulk({
await esClient.bulk({
index: config.get('esConfig.ES_INDEX'),
type: config.get('esConfig.ES_TYPE'),
// type: config.get('esConfig.ES_TYPE'),
body
})
}
@@ -69,7 +76,7 @@
}
}

co(function * () {
co(function* () {
const promises = []
const reviews = []
const reviewSummations = []
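The script-wide change in this file replaces co-style generators with native async/await. A minimal, self-contained sketch of that pattern (illustrative data; fetchPage stands in for the real dbhelper.scanRecords, which is not reproduced here):

// Simulated paginated source standing in for dbhelper.scanRecords
const fetchPage = async (cursor) => ({ items: [`item-${cursor}`], next: cursor < 2 ? cursor + 1 : null })

async function collectAll () {
  const collected = []
  let cursor = 0
  while (cursor !== null) {
    const page = await fetchPage(cursor)   // previously: yield fetchPage(cursor), driven by co
    collected.push(...page.items)
    cursor = page.next
  }
  return collected
}

collectAll().then((all) => console.log(all))   // [ 'item-0', 'item-1', 'item-2' ]
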
3 changes: 1 addition & 2 deletions src/bootstrap.js
@@ -2,7 +2,6 @@
* Initialize application and load routes
*/

global.Promise = require('bluebird')
const config = require('config')
const fs = require('fs')
const joi = require('joi')
@@ -15,7 +14,7 @@ joi.pageSize = () => joi.number().integer().min(1).max(config.get('MAX_PAGE_SIZE'))
joi.sortOrder = () => joi.string().valid('asc', 'desc', 'ASC', 'DESC')
joi.reviewStatus = () => joi.string().valid('queued', 'completed')

function buildServices (dir) {
function buildServices(dir) {
const files = fs.readdirSync(dir)

files.forEach((file) => {
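Illustrative only (not part of this PR): the custom joi helpers registered in bootstrap are later used in route schemas roughly like this, assuming a MAX_PAGE_SIZE of 100:

const joi = require('joi')
joi.pageSize = () => joi.number().integer().min(1).max(100)
joi.sortOrder = () => joi.string().valid('asc', 'desc', 'ASC', 'DESC')

const schema = joi.object({ perPage: joi.pageSize(), orderBy: joi.sortOrder() })
console.log(schema.validate({ perPage: 20, orderBy: 'asc' }).error)           // undefined — valid
console.log(schema.validate({ perPage: 500, orderBy: 'asc' }).error.message)  // "perPage" must be less than or equal to 100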