Skip to content

Commit

Permalink
Merge branch 'master' into dependabot/pip/controller/certifi-2022.12.7
Browse files Browse the repository at this point in the history
  • Loading branch information
haraprasadj authored Dec 16, 2024
2 parents c7374a6 + 4a8713f commit 5902631
Show file tree
Hide file tree
Showing 18 changed files with 192 additions and 464 deletions.
Original file line number Diff line number Diff line change
@@ -1,12 +1,16 @@
#!/bin/bash

set -x
set -e

# This automation is required so Gen3 Engineering can produce a copy of a branch from a forked-repo
# and trigger a Quay image build that is utilized in our CI Pipeline.

git config --global user.email "[email protected]"
git config --global user.name "$GITHUB_USERNAME"

OUR_REMOTE_URL="https://github.com/uc-cdis/${OUR_GEN3_SERVICE_REPO_NAME}"
URL_PREFIX="https://${GITHUB_USERNAME}:${GITHUB_TOKEN}@github.com/uc-cdis/"
OUR_REMOTE_URL="${URL_PREFIX}${OUR_GEN3_SERVICE_REPO_NAME}"

echo "cloning $OUR_REMOTE_URL..."
git clone $OUR_REMOTE_URL
Expand All @@ -18,15 +22,19 @@ cd $OUR_GEN3_SERVICE_REPO_NAME

ls -ilha

set +e
# delete branch if it already exists
branch_exists=$(git ls-remote --heads ${OUR_REMOTE_URL}.git automatedCopy-$NAME_OF_THE_BRANCH)
set -e

if [[ -z $branch_exists ]]; then
echo "git ls-remote output empty. The branch does not exist."
else
echo "WARN: git ls-remote output is NOT empty."
echo " Deleting the existing automatedCopy branch to create a new copy based on new changes from the forked-repo branch..."
set +e
git branch -D automatedCopy-$NAME_OF_THE_BRANCH
set -e
git push origin --delete automatedCopy-$NAME_OF_THE_BRANCH
fi

Expand All @@ -37,10 +45,9 @@ echo "changing origin to pull changes from external repo: https://${EXTERNAL_REP
git remote set-url origin https://${EXTERNAL_REPO_REMOTE_URL}

echo "pulling changes from external branch $NAME_OF_THE_BRANCH"
git pull origin $NAME_OF_THE_BRANCH
git pull origin $NAME_OF_THE_BRANCH --no-rebase

echo "restore original origin $OUR_REMOTE_URL"
URL_PREFIX="https://${GITHUB_USERNAME}:${GITHUB_TOKEN}@github.com/uc-cdis/"
git remote set-url origin ${URL_PREFIX}${OUR_GEN3_SERVICE_REPO_NAME}.git

echo "finish branch cloning process but pushing local changes to our repo's branch."
Expand Down
2 changes: 1 addition & 1 deletion run-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -527,7 +527,7 @@ elif ! (g3kubectl get pods --no-headers -l app=hatchery | grep hatchery) > /dev/
donot '@exportToWorkspacePortalHatchery'
fi

if [[ "$service" == "pelican" || "$service" == "tube" || "$service" == "cdis-manifest" || "$service" == "gen3-qa" ]]; then
if [[ "$service" == "pelican" || "$service" == "tube" || "$service" == "cdis-manifest" || "$service" == "gen3-qa" || "$service" == "gitops-qa" ]]; then
echo "Running pfbExportTest since repo is $service"
else
echo "Skipping pfbExportTest since repo is $service"
Expand Down
25 changes: 23 additions & 2 deletions services/portal/discovery/discoveryQuestions.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
const { sleepMS } = require('../../../utils/apiUtil.js');
const props = require('./discoveryProps.js');
const tasks = require('./discoveryTasks.js');

const I = actor();

Expand All @@ -11,7 +13,26 @@ module.exports = {
I.seeElement(props.readyCue, 30);
},

isStudyFound(studyId) {
I.seeElement(props.studyLocator(studyId), 30);
async isStudyFound(studyId) {
const retries = 10; // Number of retries
const delayInMs = 30000; // Delay between retries in milliseconds

for (let attempt = 1; attempt <= retries; attempt++) {
const visibleElements = await I.grabNumberOfVisibleElements(props.studyLocator(studyId));

if (visibleElements > 0) {
console.log(`Study found on attempt ${attempt}`);
return true; // Exit the loop if the element is found
} else {
console.log(`Attempt ${attempt} failed. Retrying in ${delayInMs / 1000} seconds...`);
if (attempt < retries) {
await sleepMS(delayInMs); // Wait before retrying
tasks.goToPage(); // Refresh discovery page
} else {
console.error('Study not found after maximum retries.');
throw new Error('Study not found.');
}
}
}
},
};
2 changes: 1 addition & 1 deletion services/portal/home/homeProps.js
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,6 @@ module.exports = {
},

systemUseAcceptButton: {
xpath: '//div[@id="popup"]//button[contains(text(), "Accept")]',
xpath: '//div[@class="popup__foot"]//button[contains(text(), "Accept")]',
},
};
4 changes: 2 additions & 2 deletions services/portal/home/homeTasks.js
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,10 @@ module.exports = {
portal.seeProp(homeProps.ready_cue, 60);
},

async handleSystemUsePopup() {
I.saveScreenshot('SystemUsePopup.png');
async handleSystemUsePopup() {;
const acceptButtonExists = await tryTo(() => I.waitForElement(homeProps.systemUseAcceptButton, 5));
output.debug(`Accept button found: ${acceptButtonExists}`);
I.saveScreenshot('SystemUsePopup.png')
if (acceptButtonExists) {
output.debug('Handling popup');
I.scrollIntoView(homeProps.systemUseAcceptButton);
Expand Down
5 changes: 3 additions & 2 deletions services/portal/studyRegistration/studyRegistrationTasks.js
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ module.exports = {

fillRequestAccessForm(email, projectTitle) {
I.amOnPage(studyRegistrationProps.requestPath);
I.wait(10)
I.saveScreenshot('FormPage.png');
I.seeElement(studyRegistrationProps.formPage);
I.waitForValue(studyRegistrationProps.projectTitle, projectTitle, 5);
Expand All @@ -77,14 +78,14 @@ module.exports = {
I.click(studyRegistrationProps.registerStudyButton);
},

async fillRegistrationForm(uuid, studyName) {
async fillRegistrationForm(uuid, studyTitle) {
I.amOnPage(studyRegistrationProps.registerPath);
I.seeElement(studyRegistrationProps.registerForm);
const study = await I.grabAttributeFrom(studyRegistrationProps.studyTitle, 'title');
if (process.env.DEBUG === 'true') {
console.log(`### StudyTitle Retrieved: ${study}`);
}
expect(study).to.be.equal(studyName);
expect(study).to.be.equal(studyTitle);
I.fillField(studyRegistrationProps.cedarUUID, uuid);
I.saveScreenshot('registerCedarID.png');
I.click(studyRegistrationProps.registerSubmitButton);
Expand Down
1 change: 0 additions & 1 deletion suites/apis/aggMDSTest.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ I.cache = {};
const testDataFiles = new DataTable(['studyFilePath']);
testDataFiles.add(['test-data/aggMDSTest/study1.json']);
testDataFiles.add(['test-data/aggMDSTest/study2.json']);
testDataFiles.add(['test-data/aggMDSTest/study3.json']);

Data(testDataFiles).Scenario('Create, edit and delete aggregate metadata record', async ({
mds, users, current,
Expand Down
3 changes: 3 additions & 0 deletions suites/apis/oidcClientTest.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ const { expect } = require('chai');
const { Bash } = require('../../utils/bash.js');
const { checkPod, runUserSync, getAccessTokenHeader } = require('../../utils/apiUtil.js');
const { Client } = require('../../services/apis/fence/fenceProps.js');
const { sleepMS } = require('../../utils/apiUtil.js');

const bash = new Bash();

Expand Down Expand Up @@ -87,6 +88,8 @@ Scenario('OIDC Client Rotation @clientRotation @requires-indexd', async ({ I, fe
// to run usersync here now that the client has been created
await runUserSync();
await checkPod(I, 'usersync', 'gen3job,job-name=usersync');
// wait for 10secs after usersync
await sleepMS(10000);

// check that both sets of credentials work:
// - we can get an access token using the creds
Expand Down
21 changes: 21 additions & 0 deletions suites/portal/GWASUIAppTest.js
Original file line number Diff line number Diff line change
Expand Up @@ -276,3 +276,24 @@ Scenario('Unauthorized access to GWAS @GWASUI', async ({
gwas.do.goToAnalysisPage();
gwas.do.unauthorizedUserSelectTeamProject();
});


// TODO : Add/Update existing test cases for "Monthly Workflow Limit"
// Test Case 1: On GWAS workflow submission, "Monthly Workflow Limit" increases by 1
// 1. Log into application using main_account user.
// 2. Goto /analysis/GWASUIApp endpoint.
// 3. Submit the GWAS workflow.
// 4. Validate the "Monthly Workflow Limit" is increased by 1.

// Test Case 2: User fails to submit GWAS workflow once "Monthly Workflow Limit" is maxed out
// 1. Log into application using main_account user.
// 2. Goto /analysis/GWASUIApp endpoint.
// 3. Verify the "Monthly Workflow Limit" is maxed out.
// 4. Submit the GWAS workflow.
// 5. Validate that the workflow submission fails.

// Test Case 3: Validate the "Monthly Workflow Limit" on GWAS Apps is the same as the GWAS result
// 1. Log into application using main_account user.
// 2. Goto /analysis/GWASUIApp endpoint.
// 3. Submit the GWAS workflow.
// 4. Validate the "Monthly Workflow Limit" is the same in the GWAS result (/analysis/GWASResults endpoint)
16 changes: 8 additions & 8 deletions suites/portal/discoveryPageTest.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ const { expect } = require('chai');
const { output } = require('codeceptjs');
const { Bash } = require('../../utils/bash.js');
const fs = require('fs');
const { sleepMS } = require('../../utils/apiUtil.js');

const bash = new Bash();
const I = actor();
Expand Down Expand Up @@ -86,7 +87,7 @@ Scenario('Publish a study, search and export to workspace @requires-indexd @requ
output.print('--- Perform tag search');
discovery.do.tagSearch('TESTING', 'AUTOTEST Tag');
I.saveScreenshot('2_clicked_tag.png');
discovery.ask.isStudyFound(I.cache.studyId);
await discovery.ask.isStudyFound(I.cache.studyId);

// Advanced search
// I.refreshPage();
Expand All @@ -100,24 +101,23 @@ Scenario('Publish a study, search and export to workspace @requires-indexd @requ
I.wait(2);
discovery.do.textSearch('[AUTOTEST Title]');
I.saveScreenshot('4_entered_text.png');
discovery.ask.isStudyFound(I.cache.studyId);
await discovery.ask.isStudyFound(I.cache.studyId);

output.print('--- Perform text search');
I.refreshPage();
I.wait(2);
discovery.do.textSearch('[AUTOTEST Summary]');
I.saveScreenshot('5_entered_text.png');
discovery.ask.isStudyFound(I.cache.studyId);
await discovery.ask.isStudyFound(I.cache.studyId);

output.print('--- Open study in workspace');
discovery.do.openInWorkspace(I.cache.studyId);
I.saveScreenshot('6_open_in_workspace.png');
I.waitInUrl('/workspace', 120);

// --- Disabled the validation since workspaces are failing to launch in jenkins envs ---
// await workspace.do.launchWorkspace('(Tutorial) Bacpac Synthetic Data Analysis Notebook');
await workspace.do.launchWorkspace('(Tutorial) Bacpac Synthetic Data Analysis Notebook');

// output.print('--- Run `gen3 drs-pull object` in a new Python3 notebook');
// await workspace.do.runCommandinPythonNotebook(`!gen3 drs-pull object --object_id ${I.cache.did}`);
// I.saveScreenshot('7_run_drs_pull_in_notebook.png');
output.print('--- Run `gen3 drs-pull object` in a new Python3 notebook');
await workspace.do.runCommandinPythonNotebook(`!gen3 drs-pull object --object_id ${I.cache.did}`);
I.saveScreenshot('7_run_drs_pull_in_notebook.png');
});
2 changes: 1 addition & 1 deletion suites/portal/gen3ffLandingPageTest.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
const I = actor();
I.cache = {};

Feature('Gen3 FF Landing Page @requires-portal @requires-frontend-framework');
Feature('Gen3 FF Landing Page @requires-portal @requires-frontend-framework @heal');

Scenario('Home page redirects to landing page', async ({ gen3ffLandingPage }) => {
I.amOnPage('/');
Expand Down
2 changes: 1 addition & 1 deletion suites/portal/indexingPageTest.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
within the sower config block (manifest-indexing & indexd-manifest)
3. The Indexing GUI is only available in data-portal >= 2.24.9
*/
Feature('Indexing GUI @requires-portal @requires-sower @requires-ssjdispatcher');
Feature('Indexing GUI @requires-portal @requires-sower @requires-ssjdispatcher @requires-fence');

const { expect } = require('chai');
const { checkPod, sleepMS } = require('../../utils/apiUtil.js');
Expand Down
26 changes: 19 additions & 7 deletions suites/portal/pfbExportTest.js
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,12 @@ BeforeSuite(async ({ I }) => {
// if this is running against an Anvil DD, sequencing must be used
// TODO: Look into reusing the leafNode logic from jenkins-simulate-data.sh
// eslint-disable-next-line no-nested-ternary
const targetMappingNode = I.cache.testedEnv.includes('anvil') ? 'sequencing' : I.cache.testedEnv.includes('vpodc') ? 'unaligned_reads_file' : 'submitted_unaligned_reads';
let targetMappingNode;
if (process.env.REPO === 'cdis-manifest' || process.env.REPO === 'gitops-qa') {
targetMappingNode = I.cache.testedEnv.includes('anvil') ? 'sequencing' : I.cache.testedEnv.includes('vpodc') ? 'unaligned_reads_file' : 'submitted_unaligned_reads';
} else {
targetMappingNode = I.cache.testedEnv.includes('vpodc') ? 'unaligned_reads_file' : 'sequencing';
}

I.cache.targetMappingNode = targetMappingNode;

Expand Down Expand Up @@ -317,8 +322,8 @@ Scenario('Visit the Explorer page, select a cohort, export to PFB and download t
login.complete.login(users.mainAcct);
I.wait(5);
I.saveScreenshot('before_checking_navbar.png');
const navBarButtons = await I.grabTextFromAll({ xpath: 'xpath: //nav[@class=\'nav-bar__nav--items\']//div/a/descendant-or-self::*' });

const navBarButtons = await I.grabTextFromAll('.body-typo.nav-button');
console.log(`### Nav bar buttons: ${navBarButtons}`);
if (navBarButtons.includes('Exploration')) {
I.amOnPage('explorer');
console.log('### I am on Exploration Page');
Expand Down Expand Up @@ -361,6 +366,7 @@ Scenario('Visit the Explorer page, select a cohort, export to PFB and download t
} else {
I.saveScreenshot('whatTheHellIsGoingOnWithTheNavBar.png');
console.log('WARN: This environment does not have any Explorer or Files button on the navigation bar. This test should not run here');
throw new Error('Test failed: Navigation bar is missing both "Explorer" and "Files" buttons.');
}

// Click on the Export to PFB button
Expand Down Expand Up @@ -415,7 +421,7 @@ Scenario('Install the latest pypfb CLI version and make sure we can parse the av
// the previous test did not create it
expect(files.fileExists(`./test_export_${I.cache.UNIQUE_NUM}.avro`), 'A "test_export_<unique number>.avro" file should have been created by previous test').to.be.true;

const pyPfbInstallationOutput = await bash.runCommand(`python3.8 -m venv pfb_test && source pfb_test/bin/activate && pip install --upgrade pip && pip install pypfb && ${I.cache.WORKSPACE}/gen3-qa/pfb_test/bin/pfb`);
const pyPfbInstallationOutput = await bash.runCommand(`python3.9 -m venv pfb_test && source pfb_test/bin/activate && pip install --upgrade pip && pip install pypfb && ${I.cache.WORKSPACE}/gen3-qa/pfb_test/bin/pfb`);
if (process.env.DEBUG === 'true') {
console.log(`${new Date()}: pyPfbInstallationOutput = ${pyPfbInstallationOutput}`);
}
Expand All @@ -438,10 +444,16 @@ Scenario('Install the latest pypfb CLI version and make sure we can parse the av
const itDDNodesSet = ddNodesSet.values();
expect(itDDNodesSet.next().value).to.equal('program');
expect(itDDNodesSet.next().value).to.equal('project');
if (I.cache.testedEnv.includes('anvil')) {
expect(itDDNodesSet.next().value).to.equal('subject');
if (process.env.REPO === 'cdis-manifest' || process.env.REPO === 'gitops-qa') {
if (I.cache.testedEnv.includes('anvil')) {
expect(itDDNodesSet.next().value).to.equal('subject');
} else {
expect(itDDNodesSet.next().value).to.equal('study');
}
} else {
expect(itDDNodesSet.next().value).to.equal('study');
expect(itDDNodesSet.next().value).to.equal('subject');
}


// TODO: Refine cohort later and make sure the selected projects show up in the PFB file
}).retry(2);
10 changes: 7 additions & 3 deletions suites/portal/studyRegistrationTest.js
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,12 @@ Scenario('Register a new study registration', async ({ I, mds, users, home, disc
// updating the studyMetadata with values
studyMetadata.gen3_discovery.appl_id = I.cache.applicationID;
studyMetadata.gen3_discovery[UIDFieldName] = I.cache.applicationID;
const nihApplID = studyMetadata.gen3_discovery.study_metadata.metadata_location.nih_application_id
const projectTitle = studyMetadata.gen3_discovery.project_title;
const projectNumber = studyMetadata.gen3_discovery.project_number;
const studyName = studyMetadata.gen3_discovery.study_metadata.minimal_info.study_name;

const accessProjectTitle = `${studyName} - ${projectNumber}`

// step 1 : create a dummy metadata record
// create a metadata record
Expand Down Expand Up @@ -89,7 +93,7 @@ Scenario('Register a new study registration', async ({ I, mds, users, home, disc
// request access to register study by filling the registration form
studyRegistration.do.searchStudy(I.cache.applicationID);
I.click(studyRegistration.props.requestAccessButton);
await studyRegistration.do.fillRequestAccessForm(users.user2.username, projectTitle);
await studyRegistration.do.fillRequestAccessForm(users.user2.username, accessProjectTitle);
// I.click(studyRegistration.props.goToDiscoverPageButton);
// get request ID by sending request to requestor end point
I.cache.requestID = await studyRegistration.do.getRequestId(users.user2.accessTokenHeader);
Expand All @@ -114,8 +118,8 @@ Scenario('Register a new study registration', async ({ I, mds, users, home, disc
if (process.env.DEBUG === 'true') {
console.log(`###CEDAR UUID: ${cedarUUID}`);
};
const studyName = `${projectNumber} : ${projectTitle} : ${I.cache.applicationID}`;
await studyRegistration.do.fillRegistrationForm(cedarUUID, studyName);
const studyTitle = `${projectNumber} : TEST : ${nihApplID}`;
await studyRegistration.do.fillRegistrationForm(cedarUUID, studyTitle);

// run aggMDS sync job after sending CEDAR request
await mds.do.reSyncAggregateMetadata();
Expand Down
9 changes: 7 additions & 2 deletions test-data/aggMDSTest/study1.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,11 @@
}],
"authz": "/open",
"sites": 3,
"summary": "[AUTOTEST Summary] The BACPAC Research Program, Data Integration, Algorithm Development, and Operations Management Center (DAC) will bring cohesion to research performed by the participating Mechanistic Research Centers, Technology Research Sites, and Phase 2 Clinical Trials Centers. DAC Investigators will share their vision and provide scientific leadership and organizational support to the BACPAC Consortium. The research plan consists of supporting design and conduct of clinical trials with precision interventions that focus on identifying the best treatments for individual patients. The DAC will enhance collaboration and research progress with experienced leadership, innovative design and analysis methodologies, comprehensive research operations support, a state-of-the-art data management and integration system, and superior administrative support. This integrated structure will set the stage for technology assessments, solicitation of patient input and utilities, and the evaluation of high-impact interventions through the innovative design and sound execution of clinical trials, leading to effective personalized treatment approaches for patients with chronic lower back pain.",
"summary": "[AUTOTEST Summary] Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Aenean commodo ligula eget dolor. Aenean massa. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Donec quam felis, ultricies nec, pellentesque eu, pretium quis, sem. Nulla consequat massa quis enim",
"location": "Chapel Hill, Nc",
"subjects": 150,
"__manifest": [],
"study_name": "BACPAC Research Consortium",
"study_type": "Other",
"institutions": "University Of North Carolina Chapel Hill",
"year_awarded": 2019,
"investigators": "Love, Pascale",
Expand All @@ -24,6 +23,12 @@
"key": "Research Focus Area",
"value": "AUTOTEST Filter"
}],
"minimal_info": {
"study_name": "[AUTOTEST Title] Testing Aggregate Metadata Service",
"study_description": "Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Aenean commodo ligula eget dolor. Aenean massa. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Donec quam felis, ultricies nec, pellentesque eu, pretium quis, sem. Nulla consequat massa quis enim",
"alternative_study_name": "Donec pede justo, fringilla vel, aliquet nec, vulputate eget, arcu",
"alternative_study_description": "In enim justo, rhoncus ut, imperdiet a, venenatis vitae, justo. Nullam dictum felis eu pede mollis pretium. Integer tincidunt. Cras dapibus. Vivamus elementum semper nisi. Aenean vulputate eleifend tellus. Aenean leo ligula, porttitor eu, consequat vitae, eleifend ac, enim."
},
"research_program": "Back Pain Consortium Research Program",
"research_question": "To inform a precision medicine approach to cLBP.",
"study_description": "Observational",
Expand Down
Loading

0 comments on commit 5902631

Please sign in to comment.